repo_id stringclasses 875
values | size int64 974 38.9k | file_path stringlengths 10 308 | content stringlengths 974 38.9k |
|---|---|---|---|
apache/hop | 35,108 | plugins/transforms/fuzzymatch/src/main/java/org/apache/hop/pipeline/transforms/fuzzymatch/FuzzyMatchDialog.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hop.pipeline.transforms.fuzzymatch;
import static org.apache.hop.pipeline.transforms.fuzzymatch.FuzzyMatchMeta.Algorithm.DAMERAU_LEVENSHTEIN;
import static org.apache.hop.pipeline.transforms.fuzzymatch.FuzzyMatchMeta.Algorithm.DOUBLE_METAPHONE;
import static org.apache.hop.pipeline.transforms.fuzzymatch.FuzzyMatchMeta.Algorithm.JARO;
import static org.apache.hop.pipeline.transforms.fuzzymatch.FuzzyMatchMeta.Algorithm.JARO_WINKLER;
import static org.apache.hop.pipeline.transforms.fuzzymatch.FuzzyMatchMeta.Algorithm.LEVENSHTEIN;
import static org.apache.hop.pipeline.transforms.fuzzymatch.FuzzyMatchMeta.Algorithm.METAPHONE;
import static org.apache.hop.pipeline.transforms.fuzzymatch.FuzzyMatchMeta.Algorithm.NEEDLEMAN_WUNSH;
import static org.apache.hop.pipeline.transforms.fuzzymatch.FuzzyMatchMeta.Algorithm.PAIR_SIMILARITY;
import static org.apache.hop.pipeline.transforms.fuzzymatch.FuzzyMatchMeta.Algorithm.REFINED_SOUNDEX;
import static org.apache.hop.pipeline.transforms.fuzzymatch.FuzzyMatchMeta.Algorithm.SOUNDEX;
import static org.apache.hop.pipeline.transforms.fuzzymatch.FuzzyMatchMeta.Algorithm.getDescriptions;
import static org.apache.hop.pipeline.transforms.fuzzymatch.FuzzyMatchMeta.Algorithm.lookupDescription;
import java.util.ArrayList;
import java.util.List;
import org.apache.hop.core.Const;
import org.apache.hop.core.Props;
import org.apache.hop.core.exception.HopException;
import org.apache.hop.core.row.IRowMeta;
import org.apache.hop.core.util.Utils;
import org.apache.hop.core.variables.IVariables;
import org.apache.hop.i18n.BaseMessages;
import org.apache.hop.pipeline.PipelineMeta;
import org.apache.hop.pipeline.transform.TransformMeta;
import org.apache.hop.pipeline.transform.stream.IStream;
import org.apache.hop.ui.core.ConstUi;
import org.apache.hop.ui.core.PropsUi;
import org.apache.hop.ui.core.dialog.BaseDialog;
import org.apache.hop.ui.core.dialog.ErrorDialog;
import org.apache.hop.ui.core.dialog.MessageBox;
import org.apache.hop.ui.core.gui.GuiResource;
import org.apache.hop.ui.core.widget.ColumnInfo;
import org.apache.hop.ui.core.widget.ComboVar;
import org.apache.hop.ui.core.widget.TableView;
import org.apache.hop.ui.core.widget.TextVar;
import org.apache.hop.ui.pipeline.transform.BaseTransformDialog;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.CCombo;
import org.eclipse.swt.custom.CTabFolder;
import org.eclipse.swt.custom.CTabItem;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Group;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.TableItem;
import org.eclipse.swt.widgets.Text;
/**
 * Dialog for configuring the Fuzzy Match transform.
 *
 * <p>Lets the user pick the lookup transform and lookup field, the main-stream field to match
 * against, the matching algorithm and its settings (case sensitivity, closer-value, min/max
 * value, separator), the output field names, and the lookup fields to return.
 */
public class FuzzyMatchDialog extends BaseTransformDialog {
  private static final Class<?> PKG = FuzzyMatchMeta.class;
  // Selector for the transform that supplies the lookup stream.
  private CCombo wTransform;
  // Selector for the matching algorithm (see FuzzyMatchMeta.Algorithm).
  private CCombo wAlgorithm;
  // Field of the main (input) stream used for matching.
  private ComboVar wMainStreamField;
  // Field of the lookup stream matched against.
  private ComboVar wLookupField;
  // Column definitions of the return-fields table; the combo values of
  // ciReturn[0] are refreshed asynchronously in setComboBoxesLookup().
  private ColumnInfo[] ciReturn;
  private Label wlReturn;
  // Table of lookup fields to return (field name + optional rename).
  private TableView wReturn;
  // Name of the output field that receives the matched value.
  private TextVar wMatchField;
  private Label wlValueField;
  // Name of the output field that receives the match measure value.
  private TextVar wValueField;
  private Label wlCaseSensitive;
  private Button wCaseSensitive;
  private Label wlGetCloserValue;
  private Button wGetCloserValue;
  private Label wlMinValue;
  private TextVar wMinValue;
  private Label wlMaxValue;
  private TextVar wMaxValue;
  private Label wlSeparator;
  private TextVar wSeparator;
  private Button wGetLookup;
  // The metadata object being edited by this dialog.
  private final FuzzyMatchMeta input;
  // Lazy-load guards: the field combos are populated at most once per dialog session.
  private boolean gotPreviousFields = false;
  private boolean gotLookupFields = false;

  /**
   * @param parent parent shell
   * @param variables variable space used to resolve variable expressions
   * @param transformMeta the Fuzzy Match metadata edited by this dialog
   * @param pipelineMeta the pipeline the transform belongs to
   */
  public FuzzyMatchDialog(
      Shell parent, IVariables variables, FuzzyMatchMeta transformMeta, PipelineMeta pipelineMeta) {
    super(parent, variables, transformMeta, pipelineMeta);
    input = transformMeta;
  }

  /**
   * Builds the dialog widgets, populates them from the metadata and opens the dialog.
   *
   * @return the transform name when the user confirmed with OK, or {@code null} on cancel
   */
  @Override
  public String open() {
    Shell parent = getParent();
    shell = new Shell(parent, SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MAX | SWT.MIN);
    PropsUi.setLook(shell);
    setShellImage(shell, input);
    FormLayout formLayout = new FormLayout();
    formLayout.marginWidth = PropsUi.getFormMargin();
    formLayout.marginHeight = PropsUi.getFormMargin();
    shell.setLayout(formLayout);
    shell.setText(BaseMessages.getString(PKG, "FuzzyMatchDialog.Shell.Title"));
    int middle = props.getMiddlePct();
    int margin = PropsUi.getMargin();

    // TransformName line
    wlTransformName = new Label(shell, SWT.RIGHT);
    wlTransformName.setText(BaseMessages.getString(PKG, "FuzzyMatchDialog.TransformName.Label"));
    PropsUi.setLook(wlTransformName);
    fdlTransformName = new FormData();
    fdlTransformName.left = new FormAttachment(0, 0);
    fdlTransformName.right = new FormAttachment(middle, -margin);
    fdlTransformName.top = new FormAttachment(0, margin);
    wlTransformName.setLayoutData(fdlTransformName);
    wTransformName = new Text(shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
    wTransformName.setText(transformName);
    PropsUi.setLook(wTransformName);
    fdTransformName = new FormData();
    fdTransformName.left = new FormAttachment(middle, 0);
    fdTransformName.top = new FormAttachment(0, margin);
    fdTransformName.right = new FormAttachment(100, 0);
    wTransformName.setLayoutData(fdTransformName);

    CTabFolder wTabFolder = new CTabFolder(shell, SWT.BORDER);
    PropsUi.setLook(wTabFolder, Props.WIDGET_STYLE_TAB);

    // ////////////////////////
    // START OF General TAB ///
    // ////////////////////////
    CTabItem wGeneralTab = new CTabItem(wTabFolder, SWT.NONE);
    wGeneralTab.setFont(GuiResource.getInstance().getFontDefault());
    wGeneralTab.setText(BaseMessages.getString(PKG, "FuzzyMatchDialog.General.Tab"));
    Composite wGeneralComp = new Composite(wTabFolder, SWT.NONE);
    PropsUi.setLook(wGeneralComp);
    FormLayout generalLayout = new FormLayout();
    generalLayout.marginWidth = 3;
    generalLayout.marginHeight = 3;
    wGeneralComp.setLayout(generalLayout);

    // /////////////////////////////////
    // START OF Lookup Fields GROUP
    // /////////////////////////////////
    Group wLookupGroup = new Group(wGeneralComp, SWT.SHADOW_NONE);
    PropsUi.setLook(wLookupGroup);
    wLookupGroup.setText(BaseMessages.getString(PKG, "FuzzyMatchDialog.Group.Lookup.Label"));
    FormLayout lookupGroupLayout = new FormLayout();
    lookupGroupLayout.marginWidth = 10;
    lookupGroupLayout.marginHeight = 10;
    wLookupGroup.setLayout(lookupGroupLayout);

    // Source transform line: offer all transforms that can feed this one.
    Label wlTransform = new Label(wLookupGroup, SWT.RIGHT);
    wlTransform.setText(BaseMessages.getString(PKG, "FuzzyMatchDialog.SourceTransform.Label"));
    PropsUi.setLook(wlTransform);
    FormData fdlTransform = new FormData();
    fdlTransform.left = new FormAttachment(0, 0);
    fdlTransform.right = new FormAttachment(middle, -margin);
    fdlTransform.top = new FormAttachment(wTransformName, margin);
    wlTransform.setLayoutData(fdlTransform);
    wTransform = new CCombo(wLookupGroup, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
    PropsUi.setLook(wTransform);
    List<TransformMeta> transforms =
        pipelineMeta.findPreviousTransforms(pipelineMeta.findTransform(transformName), true);
    for (TransformMeta transformMeta : transforms) {
      wTransform.add(transformMeta.getName());
    }
    // When the lookup transform changes, reload the return-field combo values.
    wTransform.addListener(SWT.Selection, e -> setComboBoxesLookup());
    FormData fdTransform = new FormData();
    fdTransform.left = new FormAttachment(middle, 0);
    fdTransform.top = new FormAttachment(wTransformName, margin);
    fdTransform.right = new FormAttachment(100, 0);
    wTransform.setLayoutData(fdTransform);

    // LookupField
    Label wlLookupField = new Label(wLookupGroup, SWT.RIGHT);
    wlLookupField.setText(BaseMessages.getString(PKG, "FuzzyMatchDialog.wlLookupField.Label"));
    PropsUi.setLook(wlLookupField);
    FormData fdlLookupField = new FormData();
    fdlLookupField.left = new FormAttachment(0, 0);
    fdlLookupField.top = new FormAttachment(wTransform, margin);
    fdlLookupField.right = new FormAttachment(middle, -margin);
    wlLookupField.setLayoutData(fdlLookupField);
    // NOTE(review): created READ_ONLY but immediately made editable again — the
    // combo effectively behaves as an editable field selector.
    wLookupField = new ComboVar(variables, wLookupGroup, SWT.BORDER | SWT.READ_ONLY);
    wLookupField.setEditable(true);
    PropsUi.setLook(wLookupField);
    FormData fdLookupField = new FormData();
    fdLookupField.left = new FormAttachment(middle, 0);
    fdLookupField.top = new FormAttachment(wTransform, margin);
    fdLookupField.right = new FormAttachment(100, -margin);
    wLookupField.setLayoutData(fdLookupField);
    // Lazily populate the lookup-field list the first time the combo gets focus.
    wLookupField.addListener(SWT.FocusIn, e -> setLookupField());
    FormData fdLookupGroup = new FormData();
    fdLookupGroup.left = new FormAttachment(0, margin);
    fdLookupGroup.top = new FormAttachment(wTransformName, margin);
    fdLookupGroup.right = new FormAttachment(100, -margin);
    wLookupGroup.setLayoutData(fdLookupGroup);
    // ///////////////////////////////////////////////////////////
    // / END OF Lookup GROUP
    // ///////////////////////////////////////////////////////////

    // /////////////////////////////////
    // START OF MainStream Fields GROUP
    // /////////////////////////////////
    Group wMainStreamGroup = new Group(wGeneralComp, SWT.SHADOW_NONE);
    PropsUi.setLook(wMainStreamGroup);
    wMainStreamGroup.setText(
        BaseMessages.getString(PKG, "FuzzyMatchDialog.Group.MainStreamGroup.Label"));
    FormLayout mainStreamGroupLayout = new FormLayout();
    mainStreamGroupLayout.marginWidth = 10;
    mainStreamGroupLayout.marginHeight = 10;
    wMainStreamGroup.setLayout(mainStreamGroupLayout);

    // MainStreamFieldName field
    Label wlMainStreamField = new Label(wMainStreamGroup, SWT.RIGHT);
    wlMainStreamField.setText(
        BaseMessages.getString(PKG, "FuzzyMatchDialog.wlMainStreamField.Label"));
    PropsUi.setLook(wlMainStreamField);
    FormData fdlMainStreamField = new FormData();
    fdlMainStreamField.left = new FormAttachment(0, 0);
    fdlMainStreamField.top = new FormAttachment(wLookupGroup, margin);
    fdlMainStreamField.right = new FormAttachment(middle, -margin);
    wlMainStreamField.setLayoutData(fdlMainStreamField);
    wMainStreamField = new ComboVar(variables, wMainStreamGroup, SWT.BORDER | SWT.READ_ONLY);
    wMainStreamField.setEditable(true);
    PropsUi.setLook(wMainStreamField);
    FormData fdMainStreamField = new FormData();
    fdMainStreamField.left = new FormAttachment(middle, 0);
    fdMainStreamField.top = new FormAttachment(wLookupGroup, margin);
    fdMainStreamField.right = new FormAttachment(100, -margin);
    wMainStreamField.setLayoutData(fdMainStreamField);
    // Lazily populate previous-transform fields the first time the combo gets focus.
    wMainStreamField.addListener(SWT.FocusIn, e -> setMainStreamField());
    FormData fdMainStreamGroup = new FormData();
    fdMainStreamGroup.left = new FormAttachment(0, margin);
    fdMainStreamGroup.top = new FormAttachment(wLookupGroup, margin);
    fdMainStreamGroup.right = new FormAttachment(100, -margin);
    wMainStreamGroup.setLayoutData(fdMainStreamGroup);
    // ///////////////////////////////////////////////////////////
    // / END OF MainStream GROUP
    // ///////////////////////////////////////////////////////////

    // /////////////////////////////////
    // START OF Settings Fields GROUP
    // /////////////////////////////////
    Group wSettingsGroup = new Group(wGeneralComp, SWT.SHADOW_NONE);
    PropsUi.setLook(wSettingsGroup);
    wSettingsGroup.setText(
        BaseMessages.getString(PKG, "FuzzyMatchDialog.Group.SettingsGroup.Label"));
    FormLayout settingsGroupLayout = new FormLayout();
    settingsGroupLayout.marginWidth = 10;
    settingsGroupLayout.marginHeight = 10;
    wSettingsGroup.setLayout(settingsGroupLayout);

    // Algorithm
    Label wlAlgorithm = new Label(wSettingsGroup, SWT.RIGHT);
    wlAlgorithm.setText(BaseMessages.getString(PKG, "FuzzyMatchDialog.Algorithm.Label"));
    PropsUi.setLook(wlAlgorithm);
    FormData fdlAlgorithm = new FormData();
    fdlAlgorithm.left = new FormAttachment(0, 0);
    fdlAlgorithm.right = new FormAttachment(middle, -margin);
    fdlAlgorithm.top = new FormAttachment(wMainStreamGroup, margin);
    wlAlgorithm.setLayoutData(fdlAlgorithm);
    wAlgorithm = new CCombo(wSettingsGroup, SWT.BORDER | SWT.READ_ONLY);
    PropsUi.setLook(wAlgorithm);
    FormData fdAlgorithm = new FormData();
    fdAlgorithm.left = new FormAttachment(middle, 0);
    fdAlgorithm.top = new FormAttachment(wMainStreamGroup, margin);
    fdAlgorithm.right = new FormAttachment(100, -margin);
    wAlgorithm.setLayoutData(fdAlgorithm);
    wAlgorithm.setItems(getDescriptions());
    // Changing the algorithm re-evaluates which settings widgets are enabled.
    wAlgorithm.addListener(SWT.Selection, e -> activeAlgorithm());

    // Is case-sensitive
    wlCaseSensitive = new Label(wSettingsGroup, SWT.RIGHT);
    wlCaseSensitive.setText(BaseMessages.getString(PKG, "FuzzyMatchDialog.caseSensitive.Label"));
    PropsUi.setLook(wlCaseSensitive);
    FormData fdlCaseSensitive = new FormData();
    fdlCaseSensitive.left = new FormAttachment(0, 0);
    fdlCaseSensitive.top = new FormAttachment(wAlgorithm, margin);
    fdlCaseSensitive.right = new FormAttachment(middle, -margin);
    wlCaseSensitive.setLayoutData(fdlCaseSensitive);
    wCaseSensitive = new Button(wSettingsGroup, SWT.CHECK);
    PropsUi.setLook(wCaseSensitive);
    wCaseSensitive.setToolTipText(
        BaseMessages.getString(PKG, "FuzzyMatchDialog.caseSensitive.Tooltip"));
    FormData fdcaseSensitive = new FormData();
    fdcaseSensitive.left = new FormAttachment(middle, 0);
    fdcaseSensitive.top = new FormAttachment(wlCaseSensitive, 0, SWT.CENTER);
    wCaseSensitive.setLayoutData(fdcaseSensitive);

    // Is get closer value
    wlGetCloserValue = new Label(wSettingsGroup, SWT.RIGHT);
    wlGetCloserValue.setText(BaseMessages.getString(PKG, "FuzzyMatchDialog.getCloserValue.Label"));
    PropsUi.setLook(wlGetCloserValue);
    FormData fdlGetCloserValue = new FormData();
    fdlGetCloserValue.left = new FormAttachment(0, 0);
    fdlGetCloserValue.top = new FormAttachment(wCaseSensitive, margin);
    fdlGetCloserValue.right = new FormAttachment(middle, -margin);
    wlGetCloserValue.setLayoutData(fdlGetCloserValue);
    wGetCloserValue = new Button(wSettingsGroup, SWT.CHECK);
    PropsUi.setLook(wGetCloserValue);
    wGetCloserValue.setToolTipText(
        BaseMessages.getString(PKG, "FuzzyMatchDialog.getCloserValue.Tooltip"));
    FormData fdgetCloserValue = new FormData();
    fdgetCloserValue.left = new FormAttachment(middle, 0);
    fdgetCloserValue.top = new FormAttachment(wlGetCloserValue, 0, SWT.CENTER);
    wGetCloserValue.setLayoutData(fdgetCloserValue);
    wGetCloserValue.addListener(SWT.Selection, e -> activeGetCloserValue());

    wlMinValue = new Label(wSettingsGroup, SWT.RIGHT);
    wlMinValue.setText(BaseMessages.getString(PKG, "FuzzyMatchDialog.minValue.Label"));
    PropsUi.setLook(wlMinValue);
    FormData fdlminValue = new FormData();
    fdlminValue.left = new FormAttachment(0, 0);
    fdlminValue.top = new FormAttachment(wGetCloserValue, margin);
    fdlminValue.right = new FormAttachment(middle, -margin);
    wlMinValue.setLayoutData(fdlminValue);
    wMinValue = new TextVar(variables, wSettingsGroup, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
    PropsUi.setLook(wMinValue);
    wMinValue.setToolTipText(BaseMessages.getString(PKG, "FuzzyMatchDialog.minValue.Tooltip"));
    FormData fdminValue = new FormData();
    fdminValue.left = new FormAttachment(middle, 0);
    fdminValue.top = new FormAttachment(wGetCloserValue, margin);
    fdminValue.right = new FormAttachment(100, 0);
    wMinValue.setLayoutData(fdminValue);

    wlMaxValue = new Label(wSettingsGroup, SWT.RIGHT);
    wlMaxValue.setText(BaseMessages.getString(PKG, "FuzzyMatchDialog.maxValue.Label"));
    PropsUi.setLook(wlMaxValue);
    FormData fdlmaxValue = new FormData();
    fdlmaxValue.left = new FormAttachment(0, 0);
    fdlmaxValue.top = new FormAttachment(wMinValue, margin);
    fdlmaxValue.right = new FormAttachment(middle, -margin);
    wlMaxValue.setLayoutData(fdlmaxValue);
    wMaxValue = new TextVar(variables, wSettingsGroup, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
    PropsUi.setLook(wMaxValue);
    wMaxValue.setToolTipText(BaseMessages.getString(PKG, "FuzzyMatchDialog.maxValue.Tooltip"));
    FormData fdmaxValue = new FormData();
    fdmaxValue.left = new FormAttachment(middle, 0);
    fdmaxValue.top = new FormAttachment(wMinValue, margin);
    fdmaxValue.right = new FormAttachment(100, 0);
    wMaxValue.setLayoutData(fdmaxValue);

    wlSeparator = new Label(wSettingsGroup, SWT.RIGHT);
    wlSeparator.setText(BaseMessages.getString(PKG, "FuzzyMatchDialog.separator.Label"));
    PropsUi.setLook(wlSeparator);
    FormData fdlSeparator = new FormData();
    fdlSeparator.left = new FormAttachment(0, 0);
    fdlSeparator.top = new FormAttachment(wMaxValue, margin);
    fdlSeparator.right = new FormAttachment(middle, -margin);
    wlSeparator.setLayoutData(fdlSeparator);
    wSeparator = new TextVar(variables, wSettingsGroup, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
    PropsUi.setLook(wSeparator);
    FormData fdSeparator = new FormData();
    fdSeparator.left = new FormAttachment(middle, 0);
    fdSeparator.top = new FormAttachment(wMaxValue, margin);
    fdSeparator.right = new FormAttachment(100, 0);
    wSeparator.setLayoutData(fdSeparator);

    FormData fdSettingsGroup = new FormData();
    fdSettingsGroup.left = new FormAttachment(0, margin);
    fdSettingsGroup.top = new FormAttachment(wMainStreamGroup, margin);
    fdSettingsGroup.right = new FormAttachment(100, -margin);
    wSettingsGroup.setLayoutData(fdSettingsGroup);
    // ///////////////////////////////////////////////////////////
    // / END OF Settings GROUP
    // ///////////////////////////////////////////////////////////

    FormData fdGeneralComp = new FormData();
    fdGeneralComp.left = new FormAttachment(0, 0);
    fdGeneralComp.top = new FormAttachment(0, 0);
    fdGeneralComp.right = new FormAttachment(100, 0);
    fdGeneralComp.bottom = new FormAttachment(100, 0);
    wGeneralComp.setLayoutData(fdGeneralComp);
    wGeneralComp.layout();
    wGeneralTab.setControl(wGeneralComp);
    // ///////////////////////////////////////////////////////////
    // / END OF General TAB
    // ///////////////////////////////////////////////////////////

    // The buttons go at the bottom.
    // They are created before the Fields tab so the tab folder can anchor its
    // bottom edge to the OK button below.
    //
    wOk = new Button(shell, SWT.PUSH);
    wOk.setText(BaseMessages.getString(PKG, "System.Button.OK"));
    wOk.addListener(SWT.Selection, e -> ok());
    wCancel = new Button(shell, SWT.PUSH);
    wCancel.setText(BaseMessages.getString(PKG, "System.Button.Cancel"));
    wCancel.addListener(SWT.Selection, e -> cancel());
    setButtonPositions(new Button[] {wOk, wCancel}, margin, null);

    // ////////////////////////
    // START OF Fields TAB ///
    // ////////////////////////
    CTabItem wFieldsTab = new CTabItem(wTabFolder, SWT.NONE);
    wFieldsTab.setFont(GuiResource.getInstance().getFontDefault());
    wFieldsTab.setText(BaseMessages.getString(PKG, "FuzzyMatchDialog.Fields.Tab"));
    Composite wFieldsComp = new Composite(wTabFolder, SWT.NONE);
    PropsUi.setLook(wFieldsComp);
    FormLayout fieldsLayout = new FormLayout();
    fieldsLayout.marginWidth = 3;
    fieldsLayout.marginHeight = 3;
    wFieldsComp.setLayout(fieldsLayout);

    // /////////////////////////////////
    // START OF OutputFields Fields GROUP
    // /////////////////////////////////
    Group wOutputFieldsGroup = new Group(wFieldsComp, SWT.SHADOW_NONE);
    PropsUi.setLook(wOutputFieldsGroup);
    wOutputFieldsGroup.setText(
        BaseMessages.getString(PKG, "FuzzyMatchDialog.Group.OutputFieldsGroup.Label"));
    FormLayout outputFieldsGroupLayout = new FormLayout();
    outputFieldsGroupLayout.marginWidth = 10;
    outputFieldsGroupLayout.marginHeight = 10;
    wOutputFieldsGroup.setLayout(outputFieldsGroupLayout);

    Label wlMatchField = new Label(wOutputFieldsGroup, SWT.RIGHT);
    wlMatchField.setText(BaseMessages.getString(PKG, "FuzzyMatchDialog.MatchField.Label"));
    PropsUi.setLook(wlMatchField);
    FormData fdlMatchField = new FormData();
    fdlMatchField.left = new FormAttachment(0, 0);
    fdlMatchField.top = new FormAttachment(wSettingsGroup, margin);
    fdlMatchField.right = new FormAttachment(middle, -margin);
    wlMatchField.setLayoutData(fdlMatchField);
    wMatchField = new TextVar(variables, wOutputFieldsGroup, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
    PropsUi.setLook(wMatchField);
    FormData fdMatchField = new FormData();
    fdMatchField.left = new FormAttachment(middle, 0);
    fdMatchField.top = new FormAttachment(wSettingsGroup, margin);
    fdMatchField.right = new FormAttachment(100, 0);
    wMatchField.setLayoutData(fdMatchField);

    wlValueField = new Label(wOutputFieldsGroup, SWT.RIGHT);
    wlValueField.setText(BaseMessages.getString(PKG, "FuzzyMatchDialog.valueField.Label"));
    PropsUi.setLook(wlValueField);
    FormData fdlValueField = new FormData();
    fdlValueField.left = new FormAttachment(0, 0);
    fdlValueField.top = new FormAttachment(wMatchField, margin);
    fdlValueField.right = new FormAttachment(middle, -margin);
    wlValueField.setLayoutData(fdlValueField);
    wValueField = new TextVar(variables, wOutputFieldsGroup, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
    PropsUi.setLook(wValueField);
    wValueField.setToolTipText(BaseMessages.getString(PKG, "FuzzyMatchDialog.valueField.Tooltip"));
    FormData fdValueField = new FormData();
    fdValueField.left = new FormAttachment(middle, 0);
    fdValueField.top = new FormAttachment(wMatchField, margin);
    fdValueField.right = new FormAttachment(100, 0);
    wValueField.setLayoutData(fdValueField);

    FormData fdOutputFieldsGroup = new FormData();
    fdOutputFieldsGroup.left = new FormAttachment(0, margin);
    fdOutputFieldsGroup.top = new FormAttachment(wSettingsGroup, margin);
    fdOutputFieldsGroup.right = new FormAttachment(100, -margin);
    wOutputFieldsGroup.setLayoutData(fdOutputFieldsGroup);
    // ///////////////////////////////////////////////////////////
    // / END OF OutputFields GROUP
    // ///////////////////////////////////////////////////////////

    // THE UPDATE/INSERT TABLE
    wlReturn = new Label(wFieldsComp, SWT.NONE);
    wlReturn.setText(BaseMessages.getString(PKG, "FuzzyMatchDialog.ReturnFields.Label"));
    PropsUi.setLook(wlReturn);
    FormData fdlReturn = new FormData();
    fdlReturn.left = new FormAttachment(0, 0);
    fdlReturn.top = new FormAttachment(wOutputFieldsGroup, margin);
    wlReturn.setLayoutData(fdlReturn);
    wGetLookup = new Button(wFieldsComp, SWT.PUSH);
    wGetLookup.setText(BaseMessages.getString(PKG, "FuzzyMatchDialog.GetLookupFields.Button"));
    FormData fdlGetLookup = new FormData();
    fdlGetLookup.top = new FormAttachment(wlReturn, margin);
    fdlGetLookup.right = new FormAttachment(100, 0);
    wGetLookup.setLayoutData(fdlGetLookup);
    wGetLookup.addListener(SWT.Selection, e -> getlookup());
    // Two columns: field to return (combo) and optional new name (text).
    int upInsCols = 2;
    int upInsRows = input.getLookupValues().size();
    ciReturn = new ColumnInfo[upInsCols];
    ciReturn[0] =
        new ColumnInfo(
            BaseMessages.getString(PKG, "FuzzyMatchDialog.ColumnInfo.FieldReturn"),
            ColumnInfo.COLUMN_TYPE_CCOMBO,
            new String[] {""},
            false);
    ciReturn[1] =
        new ColumnInfo(
            BaseMessages.getString(PKG, "FuzzyMatchDialog.ColumnInfo.NewName"),
            ColumnInfo.COLUMN_TYPE_TEXT,
            false);
    wReturn =
        new TableView(
            variables,
            wFieldsComp,
            SWT.BORDER | SWT.FULL_SELECTION | SWT.MULTI | SWT.V_SCROLL | SWT.H_SCROLL,
            ciReturn,
            upInsRows,
            null,
            props);
    FormData fdReturn = new FormData();
    fdReturn.left = new FormAttachment(0, 0);
    fdReturn.top = new FormAttachment(wlReturn, margin);
    fdReturn.right = new FormAttachment(wGetLookup, -margin);
    fdReturn.bottom = new FormAttachment(100, -3 * margin);
    wReturn.setLayoutData(fdReturn);

    FormData fdFieldsComp = new FormData();
    fdFieldsComp.left = new FormAttachment(0, 0);
    fdFieldsComp.top = new FormAttachment(0, 0);
    fdFieldsComp.right = new FormAttachment(100, 0);
    fdFieldsComp.bottom = new FormAttachment(100, 0);
    wFieldsComp.setLayoutData(fdFieldsComp);
    wFieldsComp.layout();
    wFieldsTab.setControl(wFieldsComp);
    // ///////////////////////////////////////////////////////////
    // / END OF Fields TAB
    // ///////////////////////////////////////////////////////////

    FormData fdTabFolder = new FormData();
    fdTabFolder.left = new FormAttachment(0, 0);
    fdTabFolder.top = new FormAttachment(wTransformName, margin);
    fdTabFolder.right = new FormAttachment(100, 0);
    fdTabFolder.bottom = new FormAttachment(wOk, -margin);
    wTabFolder.setLayoutData(fdTabFolder);
    wTabFolder.setSelection(0);

    // Populate widgets from the metadata, then sync the enable/disable state.
    getData();
    setComboBoxesLookup();
    activeAlgorithm();
    activeGetCloserValue();
    BaseDialog.defaultShellHandling(shell, c -> ok(), c -> cancel());
    return transformName;
  }

  /** Copy information from the meta-data input to the dialog fields. */
  public void getData() {
    if (isDebug()) {
      logDebug(BaseMessages.getString(PKG, "FuzzyMatchDialog.Log.GettingKeyInfo"));
    }
    if (input.getAlgorithm() != null) {
      wAlgorithm.setText(input.getAlgorithm().getDescription());
    }
    wMainStreamField.setText(Const.NVL(input.getMainStreamField(), ""));
    wLookupField.setText(Const.NVL(input.getLookupField(), ""));
    wCaseSensitive.setSelection(input.isCaseSensitive());
    wGetCloserValue.setSelection(input.isCloserValue());
    wMinValue.setText(Const.NVL(input.getMinimalValue(), ""));
    wMaxValue.setText(Const.NVL(input.getMaximalValue(), ""));
    wMatchField.setText(Const.NVL(input.getOutputMatchField(), ""));
    wValueField.setText(Const.NVL(input.getOutputValueField(), ""));
    wSeparator.setText(Const.NVL(input.getSeparator(), ""));
    // Fill the return-fields table; the table was created with one row per
    // configured lookup value. Table columns are 1-based here.
    for (int i = 0; i < input.getLookupValues().size(); i++) {
      FuzzyMatchMeta.FMLookupValue lookupValue = input.getLookupValues().get(i);
      TableItem item = wReturn.table.getItem(i);
      item.setText(1, Const.NVL(lookupValue.getName(), ""));
      item.setText(2, Const.NVL(lookupValue.getRename(), ""));
    }
    // The lookup transform name is stored in the transform's first info stream.
    IStream infoStream = input.getTransformIOMeta().getInfoStreams().get(0);
    wTransform.setText(Const.NVL(infoStream.getTransformName(), ""));
    wReturn.optimizeTableView();
    wTransformName.selectAll();
    wTransformName.setFocus();
  }

  /** Closes the dialog without saving; a null transform name signals "cancelled". */
  private void cancel() {
    transformName = null;
    dispose();
  }

  /**
   * Copies the dialog fields back into the metadata and closes the dialog.
   * Does nothing when the transform name is empty.
   */
  private void ok() {
    if (Utils.isEmpty(wTransformName.getText())) {
      return;
    }
    input.setMainStreamField(wMainStreamField.getText());
    input.setLookupTransformName(wTransform.getText());
    input.setLookupField(wLookupField.getText());
    input.setAlgorithm(lookupDescription(wAlgorithm.getText()));
    input.setCaseSensitive(wCaseSensitive.getSelection());
    input.setCloserValue(wGetCloserValue.getSelection());
    input.setMaximalValue(wMaxValue.getText());
    input.setMinimalValue(wMinValue.getText());
    input.setOutputMatchField(wMatchField.getText());
    input.setOutputValueField(wValueField.getText());
    input.setSeparator(wSeparator.getText());
    // Rebuild the lookup-value list from the non-empty rows of the table.
    input.getLookupValues().clear();
    for (TableItem item : wReturn.getNonEmptyItems()) {
      FuzzyMatchMeta.FMLookupValue lookupValue = new FuzzyMatchMeta.FMLookupValue();
      lookupValue.setName(item.getText(1));
      lookupValue.setRename(item.getText(2));
      input.getLookupValues().add(lookupValue);
    }
    transformName = wTransformName.getText(); // return value
    input.setChanged();
    dispose();
  }

  /**
   * Populates the main-stream field combo with the field names coming from the
   * previous transform. Runs at most once per dialog session (gotPreviousFields);
   * the current text is preserved across the reload.
   */
  private void setMainStreamField() {
    if (!gotPreviousFields) {
      String field = wMainStreamField.getText();
      try {
        wMainStreamField.removeAll();
        IRowMeta r = pipelineMeta.getPrevTransformFields(variables, transformName);
        if (r != null) {
          wMainStreamField.setItems(r.getFieldNames());
        }
      } catch (HopException ke) {
        new ErrorDialog(
            shell,
            BaseMessages.getString(PKG, "FuzzyMatchDialog.FailedToGetFields.DialogTitle"),
            BaseMessages.getString(PKG, "FuzzyMatchDialog.FailedToGetFields.DialogMessage"),
            ke);
      }
      if (field != null) {
        wMainStreamField.setText(field);
      }
      // NOTE(review): the flag is set even when the lookup failed, so a failed
      // load is not retried on later focus events.
      gotPreviousFields = true;
    }
  }

  /**
   * Populates the lookup field combo with the field names of the selected
   * lookup transform. Runs at most once per dialog session (gotLookupFields);
   * the current text is preserved across the reload.
   */
  private void setLookupField() {
    if (!gotLookupFields) {
      String field = wLookupField.getText();
      try {
        wLookupField.removeAll();
        IRowMeta r = pipelineMeta.getTransformFields(variables, wTransform.getText());
        if (r != null) {
          wLookupField.setItems(r.getFieldNames());
        }
      } catch (HopException ke) {
        new ErrorDialog(
            shell,
            BaseMessages.getString(PKG, "FuzzyMatchDialog.FailedToGetLookupFields.DialogTitle"),
            BaseMessages.getString(PKG, "FuzzyMatchDialog.FailedToGetLookupFields.DialogMessage"),
            ke);
      }
      if (field != null) {
        wLookupField.setText(field);
      }
      gotLookupFields = true;
    }
  }

  /**
   * Enables/disables the separator and value-field widgets depending on the
   * selected algorithm and the "get closer value" checkbox: the separator only
   * applies to distance/similarity algorithms when multiple matches may be
   * returned (closer-value unchecked); the value field only applies when
   * closer-value is checked.
   */
  private void activeGetCloserValue() {
    FuzzyMatchMeta.Algorithm algorithm = lookupDescription(wAlgorithm.getText());
    boolean enableRange =
        (algorithm == LEVENSHTEIN
                || algorithm == NEEDLEMAN_WUNSH
                || algorithm == DAMERAU_LEVENSHTEIN
                || algorithm == JARO
                || algorithm == JARO_WINKLER
                || algorithm == PAIR_SIMILARITY)
            && !wGetCloserValue.getSelection();
    wlSeparator.setEnabled(enableRange);
    wSeparator.setEnabled(enableRange);
    wlValueField.setEnabled(wGetCloserValue.getSelection());
    wValueField.setEnabled(wGetCloserValue.getSelection());
    activeAddFields();
  }

  /**
   * Enables/disables the return-fields table and its "get fields" button.
   * Active when closer-value is checked or the algorithm is one of the
   * phonetic ones (metaphone/soundex variants).
   */
  private void activeAddFields() {
    FuzzyMatchMeta.Algorithm algorithm = lookupDescription(wAlgorithm.getText());
    boolean activate =
        wGetCloserValue.getSelection()
            || algorithm == DOUBLE_METAPHONE
            || algorithm == SOUNDEX
            || algorithm == REFINED_SOUNDEX
            || algorithm == METAPHONE;
    wlReturn.setEnabled(activate);
    wReturn.setEnabled(activate);
    wGetLookup.setEnabled(activate);
  }

  /**
   * Adjusts the settings widgets to the selected algorithm: closer-value and
   * min/max only apply to distance/similarity algorithms; min/max are clamped
   * to 1 for the similarity-score algorithms (JARO, JARO_WINKLER,
   * PAIR_SIMILARITY); case sensitivity only applies to the Levenshtein variants.
   */
  private void activeAlgorithm() {
    FuzzyMatchMeta.Algorithm algorithm = lookupDescription(wAlgorithm.getText());
    boolean enable =
        (algorithm == LEVENSHTEIN
            || algorithm == NEEDLEMAN_WUNSH
            || algorithm == DAMERAU_LEVENSHTEIN
            || algorithm == JARO
            || algorithm == JARO_WINKLER
            || algorithm == PAIR_SIMILARITY);
    wlGetCloserValue.setEnabled(enable);
    wGetCloserValue.setEnabled(enable);
    wlMinValue.setEnabled(enable);
    wMinValue.setEnabled(enable);
    wlMaxValue.setEnabled(enable);
    wMaxValue.setEnabled(enable);
    if (algorithm == JARO || algorithm == JARO_WINKLER || algorithm == PAIR_SIMILARITY) {
      // These algorithms work with values no greater than 1; clamp user input.
      if (Const.toDouble(variables.resolve(wMinValue.getText()), 0) > 1) {
        wMinValue.setText(String.valueOf(1));
      }
      if (Const.toDouble(variables.resolve(wMaxValue.getText()), 0) > 1) {
        wMaxValue.setText(String.valueOf(1));
      }
    }
    boolean enableCaseSensitive = (algorithm == LEVENSHTEIN || algorithm == DAMERAU_LEVENSHTEIN);
    wlCaseSensitive.setEnabled(enableCaseSensitive);
    wCaseSensitive.setEnabled(enableCaseSensitive);
    activeGetCloserValue();
  }

  /**
   * Fills the return-fields table with the output fields of the selected
   * lookup transform. Shows an error box when no lookup transform is selected
   * or when it exposes no fields.
   */
  private void getlookup() {
    try {
      String transformFrom = wTransform.getText();
      if (!Utils.isEmpty(transformFrom)) {
        IRowMeta r = pipelineMeta.getTransformFields(variables, transformFrom);
        if (r != null && !r.isEmpty()) {
          BaseTransformDialog.getFieldsFromPrevious(
              r, wReturn, 1, new int[] {1}, new int[] {4}, -1, -1, null);
        } else {
          MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_ERROR);
          mb.setMessage(
              BaseMessages.getString(PKG, "FuzzyMatchDialog.CouldNotFindFields.DialogMessage"));
          mb.setText(
              BaseMessages.getString(PKG, "FuzzyMatchDialog.CouldNotFindFields.DialogTitle"));
          mb.open();
        }
      } else {
        MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_ERROR);
        mb.setMessage(
            BaseMessages.getString(PKG, "FuzzyMatchDialog.TransformNameRequired.DialogMessage"));
        mb.setText(
            BaseMessages.getString(PKG, "FuzzyMatchDialog.TransformNameRequired.DialogTitle"));
        mb.open();
      }
    } catch (HopException ke) {
      new ErrorDialog(
          shell,
          BaseMessages.getString(PKG, "FuzzyMatchDialog.FailedToGetFields.DialogTitle"),
          BaseMessages.getString(PKG, "FuzzyMatchDialog.FailedToGetFields.DialogMessage"),
          ke);
    }
  }

  /**
   * Reloads the combo values of the return-fields column with the (sorted)
   * field names of the currently selected lookup transform. The work is posted
   * with asyncExec, so it runs later on the UI thread instead of inside the
   * current event handler. Failures are only logged.
   */
  protected void setComboBoxesLookup() {
    Runnable fieldLoader =
        () -> {
          TransformMeta lookupTransformMeta = pipelineMeta.findTransform(wTransform.getText());
          if (lookupTransformMeta != null) {
            try {
              IRowMeta row = pipelineMeta.getTransformFields(variables, lookupTransformMeta);
              List<String> lookupFields = new ArrayList<>();
              // Remember these fields...
              for (int i = 0; i < row.size(); i++) {
                lookupFields.add(row.getValueMeta(i).getName());
              }
              // Something was changed in the row.
              //
              String[] fieldNames = ConstUi.sortFieldNames(lookupFields);
              // return fields
              ciReturn[0].setComboValues(fieldNames);
            } catch (HopException e) {
              logError(
                  "It was not possible to retrieve the list of fields for transform ["
                      + wTransform.getText()
                      + "]!");
            }
          }
        };
    shell.getDisplay().asyncExec(fieldLoader);
  }
}
|
googleapis/google-cloud-java | 34,934 | java-deploy/proto-google-cloud-deploy-v1/src/main/java/com/google/cloud/deploy/v1/PrivatePool.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/deploy/v1/cloud_deploy.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.deploy.v1;
/**
*
*
* <pre>
* Execution using a private Cloud Build pool.
* </pre>
*
* Protobuf type {@code google.cloud.deploy.v1.PrivatePool}
*/
public final class PrivatePool extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.deploy.v1.PrivatePool)
    PrivatePoolOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use PrivatePool.newBuilder() to construct.
  private PrivatePool(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default-instance constructor: all string fields start at the proto3 default ("").
  private PrivatePool() {
    workerPool_ = "";
    serviceAccount_ = "";
    artifactStorage_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new PrivatePool();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.deploy.v1.CloudDeployProto
        .internal_static_google_cloud_deploy_v1_PrivatePool_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.deploy.v1.CloudDeployProto
        .internal_static_google_cloud_deploy_v1_PrivatePool_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.deploy.v1.PrivatePool.class,
            com.google.cloud.deploy.v1.PrivatePool.Builder.class);
  }

  public static final int WORKER_POOL_FIELD_NUMBER = 1;

  // Holds either a String or a ByteString; lazily decoded/encoded and cached by the accessors.
  @SuppressWarnings("serial")
  private volatile java.lang.Object workerPool_ = "";

  /**
   *
   *
   * <pre>
   * Required. Resource name of the Cloud Build worker pool to use. The format
   * is `projects/{project}/locations/{location}/workerPools/{pool}`.
   * </pre>
   *
   * <code>
   * string worker_pool = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The workerPool.
   */
  @java.lang.Override
  public java.lang.String getWorkerPool() {
    java.lang.Object ref = workerPool_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // First access after parsing: decode the ByteString once and cache the String.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      workerPool_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. Resource name of the Cloud Build worker pool to use. The format
   * is `projects/{project}/locations/{location}/workerPools/{pool}`.
   * </pre>
   *
   * <code>
   * string worker_pool = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for workerPool.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getWorkerPoolBytes() {
    java.lang.Object ref = workerPool_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      workerPool_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int SERVICE_ACCOUNT_FIELD_NUMBER = 2;

  // Holds either a String or a ByteString; lazily decoded/encoded and cached by the accessors.
  @SuppressWarnings("serial")
  private volatile java.lang.Object serviceAccount_ = "";

  /**
   *
   *
   * <pre>
   * Optional. Google service account to use for execution. If unspecified,
   * the project execution service account
   * (&lt;PROJECT_NUMBER&gt;-compute&#64;developer.gserviceaccount.com) will be used.
   * </pre>
   *
   * <code>string service_account = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The serviceAccount.
   */
  @java.lang.Override
  public java.lang.String getServiceAccount() {
    java.lang.Object ref = serviceAccount_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      serviceAccount_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Optional. Google service account to use for execution. If unspecified,
   * the project execution service account
   * (&lt;PROJECT_NUMBER&gt;-compute&#64;developer.gserviceaccount.com) will be used.
   * </pre>
   *
   * <code>string service_account = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for serviceAccount.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getServiceAccountBytes() {
    java.lang.Object ref = serviceAccount_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      serviceAccount_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int ARTIFACT_STORAGE_FIELD_NUMBER = 3;

  // Holds either a String or a ByteString; lazily decoded/encoded and cached by the accessors.
  @SuppressWarnings("serial")
  private volatile java.lang.Object artifactStorage_ = "";

  /**
   *
   *
   * <pre>
   * Optional. Cloud Storage location where execution outputs should be stored.
   * This can either be a bucket ("gs://my-bucket") or a path within a bucket
   * ("gs://my-bucket/my-dir").
   * If unspecified, a default bucket located in the same region will be used.
   * </pre>
   *
   * <code>string artifact_storage = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The artifactStorage.
   */
  @java.lang.Override
  public java.lang.String getArtifactStorage() {
    java.lang.Object ref = artifactStorage_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      artifactStorage_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Optional. Cloud Storage location where execution outputs should be stored.
   * This can either be a bucket ("gs://my-bucket") or a path within a bucket
   * ("gs://my-bucket/my-dir").
   * If unspecified, a default bucket located in the same region will be used.
   * </pre>
   *
   * <code>string artifact_storage = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for artifactStorage.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getArtifactStorageBytes() {
    java.lang.Object ref = artifactStorage_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      artifactStorage_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // Memoized result of isInitialized(): -1 = not computed, 1 = initialized, 0 = not initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    // No required proto2-style fields, so the message is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes the non-default (non-empty) fields in field-number order, then any unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(workerPool_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, workerPool_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(serviceAccount_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, serviceAccount_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(artifactStorage_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, artifactStorage_);
    }
    getUnknownFields().writeTo(output);
  }

  // Computes (and memoizes) the serialized byte size; mirrors the field handling of writeTo().
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(workerPool_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, workerPool_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(serviceAccount_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, serviceAccount_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(artifactStorage_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, artifactStorage_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  // Value equality: all three string fields plus unknown fields must match.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.deploy.v1.PrivatePool)) {
      return super.equals(obj);
    }
    com.google.cloud.deploy.v1.PrivatePool other = (com.google.cloud.deploy.v1.PrivatePool) obj;

    if (!getWorkerPool().equals(other.getWorkerPool())) return false;
    if (!getServiceAccount().equals(other.getServiceAccount())) return false;
    if (!getArtifactStorage().equals(other.getArtifactStorage())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Memoized hash over the descriptor, each field (tagged by field number), and unknown fields.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + WORKER_POOL_FIELD_NUMBER;
    hash = (53 * hash) + getWorkerPool().hashCode();
    hash = (37 * hash) + SERVICE_ACCOUNT_FIELD_NUMBER;
    hash = (53 * hash) + getServiceAccount().hashCode();
    hash = (37 * hash) + ARTIFACT_STORAGE_FIELD_NUMBER;
    hash = (53 * hash) + getArtifactStorage().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // Standard generated parseFrom overloads delegating to PARSER / GeneratedMessageV3 helpers.
  public static com.google.cloud.deploy.v1.PrivatePool parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.deploy.v1.PrivatePool parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.deploy.v1.PrivatePool parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.deploy.v1.PrivatePool parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.deploy.v1.PrivatePool parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.deploy.v1.PrivatePool parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.deploy.v1.PrivatePool parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.deploy.v1.PrivatePool parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.deploy.v1.PrivatePool parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.deploy.v1.PrivatePool parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.deploy.v1.PrivatePool parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.deploy.v1.PrivatePool parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.cloud.deploy.v1.PrivatePool prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * Execution using a private Cloud Build pool.
   * </pre>
   *
   * Protobuf type {@code google.cloud.deploy.v1.PrivatePool}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.deploy.v1.PrivatePool)
      com.google.cloud.deploy.v1.PrivatePoolOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.deploy.v1.CloudDeployProto
          .internal_static_google_cloud_deploy_v1_PrivatePool_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.deploy.v1.CloudDeployProto
          .internal_static_google_cloud_deploy_v1_PrivatePool_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.deploy.v1.PrivatePool.class,
              com.google.cloud.deploy.v1.PrivatePool.Builder.class);
    }

    // Construct using com.google.cloud.deploy.v1.PrivatePool.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets the builder to the default (empty) state and clears the has-bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      workerPool_ = "";
      serviceAccount_ = "";
      artifactStorage_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.deploy.v1.CloudDeployProto
          .internal_static_google_cloud_deploy_v1_PrivatePool_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.deploy.v1.PrivatePool getDefaultInstanceForType() {
      return com.google.cloud.deploy.v1.PrivatePool.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.deploy.v1.PrivatePool build() {
      com.google.cloud.deploy.v1.PrivatePool result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.deploy.v1.PrivatePool buildPartial() {
      com.google.cloud.deploy.v1.PrivatePool result =
          new com.google.cloud.deploy.v1.PrivatePool(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose has-bits are set into the result message.
    private void buildPartial0(com.google.cloud.deploy.v1.PrivatePool result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.workerPool_ = workerPool_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.serviceAccount_ = serviceAccount_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.artifactStorage_ = artifactStorage_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.deploy.v1.PrivatePool) {
        return mergeFrom((com.google.cloud.deploy.v1.PrivatePool) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Field-wise merge: non-empty fields of `other` overwrite this builder's values.
    public Builder mergeFrom(com.google.cloud.deploy.v1.PrivatePool other) {
      if (other == com.google.cloud.deploy.v1.PrivatePool.getDefaultInstance()) return this;
      if (!other.getWorkerPool().isEmpty()) {
        workerPool_ = other.workerPool_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getServiceAccount().isEmpty()) {
        serviceAccount_ = other.serviceAccount_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (!other.getArtifactStorage().isEmpty()) {
        artifactStorage_ = other.artifactStorage_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format merge: reads tags until EOF/endgroup; unknown tags go to the unknown-field set.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                workerPool_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                serviceAccount_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                artifactStorage_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Has-bits: 0x1 = workerPool_, 0x2 = serviceAccount_, 0x4 = artifactStorage_.
    private int bitField0_;

    private java.lang.Object workerPool_ = "";
    /**
     *
     *
     * <pre>
     * Required. Resource name of the Cloud Build worker pool to use. The format
     * is `projects/{project}/locations/{location}/workerPools/{pool}`.
     * </pre>
     *
     * <code>
     * string worker_pool = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The workerPool.
     */
    public java.lang.String getWorkerPool() {
      java.lang.Object ref = workerPool_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        workerPool_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. Resource name of the Cloud Build worker pool to use. The format
     * is `projects/{project}/locations/{location}/workerPools/{pool}`.
     * </pre>
     *
     * <code>
     * string worker_pool = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for workerPool.
     */
    public com.google.protobuf.ByteString getWorkerPoolBytes() {
      java.lang.Object ref = workerPool_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        workerPool_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. Resource name of the Cloud Build worker pool to use. The format
     * is `projects/{project}/locations/{location}/workerPools/{pool}`.
     * </pre>
     *
     * <code>
     * string worker_pool = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The workerPool to set.
     * @return This builder for chaining.
     */
    public Builder setWorkerPool(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      workerPool_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Resource name of the Cloud Build worker pool to use. The format
     * is `projects/{project}/locations/{location}/workerPools/{pool}`.
     * </pre>
     *
     * <code>
     * string worker_pool = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearWorkerPool() {
      workerPool_ = getDefaultInstance().getWorkerPool();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Resource name of the Cloud Build worker pool to use. The format
     * is `projects/{project}/locations/{location}/workerPools/{pool}`.
     * </pre>
     *
     * <code>
     * string worker_pool = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for workerPool to set.
     * @return This builder for chaining.
     */
    public Builder setWorkerPoolBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      workerPool_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private java.lang.Object serviceAccount_ = "";
    /**
     *
     *
     * <pre>
     * Optional. Google service account to use for execution. If unspecified,
     * the project execution service account
     * (&lt;PROJECT_NUMBER&gt;-compute&#64;developer.gserviceaccount.com) will be used.
     * </pre>
     *
     * <code>string service_account = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The serviceAccount.
     */
    public java.lang.String getServiceAccount() {
      java.lang.Object ref = serviceAccount_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        serviceAccount_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. Google service account to use for execution. If unspecified,
     * the project execution service account
     * (&lt;PROJECT_NUMBER&gt;-compute&#64;developer.gserviceaccount.com) will be used.
     * </pre>
     *
     * <code>string service_account = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for serviceAccount.
     */
    public com.google.protobuf.ByteString getServiceAccountBytes() {
      java.lang.Object ref = serviceAccount_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        serviceAccount_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. Google service account to use for execution. If unspecified,
     * the project execution service account
     * (&lt;PROJECT_NUMBER&gt;-compute&#64;developer.gserviceaccount.com) will be used.
     * </pre>
     *
     * <code>string service_account = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The serviceAccount to set.
     * @return This builder for chaining.
     */
    public Builder setServiceAccount(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      serviceAccount_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. Google service account to use for execution. If unspecified,
     * the project execution service account
     * (&lt;PROJECT_NUMBER&gt;-compute&#64;developer.gserviceaccount.com) will be used.
     * </pre>
     *
     * <code>string service_account = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearServiceAccount() {
      serviceAccount_ = getDefaultInstance().getServiceAccount();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. Google service account to use for execution. If unspecified,
     * the project execution service account
     * (&lt;PROJECT_NUMBER&gt;-compute&#64;developer.gserviceaccount.com) will be used.
     * </pre>
     *
     * <code>string service_account = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for serviceAccount to set.
     * @return This builder for chaining.
     */
    public Builder setServiceAccountBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      serviceAccount_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    private java.lang.Object artifactStorage_ = "";
    /**
     *
     *
     * <pre>
     * Optional. Cloud Storage location where execution outputs should be stored.
     * This can either be a bucket ("gs://my-bucket") or a path within a bucket
     * ("gs://my-bucket/my-dir").
     * If unspecified, a default bucket located in the same region will be used.
     * </pre>
     *
     * <code>string artifact_storage = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The artifactStorage.
     */
    public java.lang.String getArtifactStorage() {
      java.lang.Object ref = artifactStorage_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        artifactStorage_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. Cloud Storage location where execution outputs should be stored.
     * This can either be a bucket ("gs://my-bucket") or a path within a bucket
     * ("gs://my-bucket/my-dir").
     * If unspecified, a default bucket located in the same region will be used.
     * </pre>
     *
     * <code>string artifact_storage = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for artifactStorage.
     */
    public com.google.protobuf.ByteString getArtifactStorageBytes() {
      java.lang.Object ref = artifactStorage_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        artifactStorage_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. Cloud Storage location where execution outputs should be stored.
     * This can either be a bucket ("gs://my-bucket") or a path within a bucket
     * ("gs://my-bucket/my-dir").
     * If unspecified, a default bucket located in the same region will be used.
     * </pre>
     *
     * <code>string artifact_storage = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The artifactStorage to set.
     * @return This builder for chaining.
     */
    public Builder setArtifactStorage(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      artifactStorage_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. Cloud Storage location where execution outputs should be stored.
     * This can either be a bucket ("gs://my-bucket") or a path within a bucket
     * ("gs://my-bucket/my-dir").
     * If unspecified, a default bucket located in the same region will be used.
     * </pre>
     *
     * <code>string artifact_storage = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearArtifactStorage() {
      artifactStorage_ = getDefaultInstance().getArtifactStorage();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. Cloud Storage location where execution outputs should be stored.
     * This can either be a bucket ("gs://my-bucket") or a path within a bucket
     * ("gs://my-bucket/my-dir").
     * If unspecified, a default bucket located in the same region will be used.
     * </pre>
     *
     * <code>string artifact_storage = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for artifactStorage to set.
     * @return This builder for chaining.
     */
    public Builder setArtifactStorageBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      artifactStorage_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.deploy.v1.PrivatePool)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.deploy.v1.PrivatePool)
  private static final com.google.cloud.deploy.v1.PrivatePool DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.deploy.v1.PrivatePool();
  }

  public static com.google.cloud.deploy.v1.PrivatePool getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Shared stateless parser; wraps parse failures with the partially-built message attached.
  private static final com.google.protobuf.Parser<PrivatePool> PARSER =
      new com.google.protobuf.AbstractParser<PrivatePool>() {
        @java.lang.Override
        public PrivatePool parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<PrivatePool> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<PrivatePool> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.deploy.v1.PrivatePool getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 35,187 | java-discoveryengine/google-cloud-discoveryengine/src/main/java/com/google/cloud/discoveryengine/v1alpha/stub/DocumentServiceStubSettings.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.discoveryengine.v1alpha.stub;
import static com.google.cloud.discoveryengine.v1alpha.DocumentServiceClient.ListDocumentsPagedResponse;
import com.google.api.core.ApiFunction;
import com.google.api.core.ApiFuture;
import com.google.api.core.BetaApi;
import com.google.api.core.ObsoleteApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.GrpcTransportChannel;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
import com.google.api.gax.grpc.ProtoOperationTransformers;
import com.google.api.gax.httpjson.GaxHttpJsonProperties;
import com.google.api.gax.httpjson.HttpJsonTransportChannel;
import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider;
import com.google.api.gax.longrunning.OperationSnapshot;
import com.google.api.gax.longrunning.OperationTimedPollAlgorithm;
import com.google.api.gax.retrying.RetrySettings;
import com.google.api.gax.rpc.ApiCallContext;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallSettings;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.PagedListDescriptor;
import com.google.api.gax.rpc.PagedListResponseFactory;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.StubSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.discoveryengine.v1alpha.BatchGetDocumentsMetadataRequest;
import com.google.cloud.discoveryengine.v1alpha.BatchGetDocumentsMetadataResponse;
import com.google.cloud.discoveryengine.v1alpha.CreateDocumentRequest;
import com.google.cloud.discoveryengine.v1alpha.DeleteDocumentRequest;
import com.google.cloud.discoveryengine.v1alpha.Document;
import com.google.cloud.discoveryengine.v1alpha.GetDocumentRequest;
import com.google.cloud.discoveryengine.v1alpha.GetProcessedDocumentRequest;
import com.google.cloud.discoveryengine.v1alpha.ImportDocumentsMetadata;
import com.google.cloud.discoveryengine.v1alpha.ImportDocumentsRequest;
import com.google.cloud.discoveryengine.v1alpha.ImportDocumentsResponse;
import com.google.cloud.discoveryengine.v1alpha.ListDocumentsRequest;
import com.google.cloud.discoveryengine.v1alpha.ListDocumentsResponse;
import com.google.cloud.discoveryengine.v1alpha.ProcessedDocument;
import com.google.cloud.discoveryengine.v1alpha.PurgeDocumentsMetadata;
import com.google.cloud.discoveryengine.v1alpha.PurgeDocumentsRequest;
import com.google.cloud.discoveryengine.v1alpha.PurgeDocumentsResponse;
import com.google.cloud.discoveryengine.v1alpha.UpdateDocumentRequest;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.longrunning.Operation;
import com.google.protobuf.Empty;
import java.io.IOException;
import java.time.Duration;
import java.util.List;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Settings class to configure an instance of {@link DocumentServiceStub}.
*
* <p>The default instance has everything set to sensible defaults:
*
* <ul>
* <li>The default service address (discoveryengine.googleapis.com) and default port (443) are
* used.
* <li>Credentials are acquired automatically through Application Default Credentials.
* <li>Retries are configured for idempotent methods but not for non-idempotent methods.
* </ul>
*
* <p>The builder of this class is recursive, so contained classes are themselves builders. When
* build() is called, the tree of builders is called to create the complete settings object.
*
* <p>For example, to set the
* [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings)
* of getDocument:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* DocumentServiceStubSettings.Builder documentServiceSettingsBuilder =
* DocumentServiceStubSettings.newBuilder();
* documentServiceSettingsBuilder
* .getDocumentSettings()
* .setRetrySettings(
* documentServiceSettingsBuilder
* .getDocumentSettings()
* .getRetrySettings()
* .toBuilder()
* .setInitialRetryDelayDuration(Duration.ofSeconds(1))
* .setInitialRpcTimeoutDuration(Duration.ofSeconds(5))
* .setMaxAttempts(5)
* .setMaxRetryDelayDuration(Duration.ofSeconds(30))
* .setMaxRpcTimeoutDuration(Duration.ofSeconds(60))
* .setRetryDelayMultiplier(1.3)
* .setRpcTimeoutMultiplier(1.5)
* .setTotalTimeoutDuration(Duration.ofSeconds(300))
* .build());
* DocumentServiceStubSettings documentServiceSettings = documentServiceSettingsBuilder.build();
* }</pre>
*
* Please refer to the [Client Side Retry
* Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for
* additional support in setting retries.
*
* <p>To configure the RetrySettings of a Long Running Operation method, create an
* OperationTimedPollAlgorithm object and update the RPC's polling algorithm. For example, to
* configure the RetrySettings for importDocuments:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* DocumentServiceStubSettings.Builder documentServiceSettingsBuilder =
* DocumentServiceStubSettings.newBuilder();
 * TimedRetryAlgorithm timedRetryAlgorithm =
 *     OperationTimedPollAlgorithm.create(
 *         RetrySettings.newBuilder()
 *             .setInitialRetryDelayDuration(Duration.ofMillis(500))
 *             .setRetryDelayMultiplier(1.5)
 *             .setMaxRetryDelayDuration(Duration.ofMillis(5000))
 *             .setTotalTimeoutDuration(Duration.ofHours(24))
 *             .build());
 * documentServiceSettingsBuilder
 *     .importDocumentsOperationSettings()
 *     .setPollingAlgorithm(timedRetryAlgorithm)
 *     .build();
* }</pre>
*/
@BetaApi
@Generated("by gapic-generator-java")
public class DocumentServiceStubSettings extends StubSettings<DocumentServiceStubSettings> {
  /** The default scopes of the service. */
  private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
      ImmutableList.<String>builder().add("https://www.googleapis.com/auth/cloud-platform").build();

  // Immutable per-RPC call settings, frozen from the Builder in the constructor below.
  private final UnaryCallSettings<GetDocumentRequest, Document> getDocumentSettings;
  private final PagedCallSettings<
          ListDocumentsRequest, ListDocumentsResponse, ListDocumentsPagedResponse>
      listDocumentsSettings;
  private final UnaryCallSettings<CreateDocumentRequest, Document> createDocumentSettings;
  private final UnaryCallSettings<UpdateDocumentRequest, Document> updateDocumentSettings;
  private final UnaryCallSettings<DeleteDocumentRequest, Empty> deleteDocumentSettings;
  private final UnaryCallSettings<ImportDocumentsRequest, Operation> importDocumentsSettings;
  private final OperationCallSettings<
          ImportDocumentsRequest, ImportDocumentsResponse, ImportDocumentsMetadata>
      importDocumentsOperationSettings;
  private final UnaryCallSettings<PurgeDocumentsRequest, Operation> purgeDocumentsSettings;
  private final OperationCallSettings<
          PurgeDocumentsRequest, PurgeDocumentsResponse, PurgeDocumentsMetadata>
      purgeDocumentsOperationSettings;
  private final UnaryCallSettings<GetProcessedDocumentRequest, ProcessedDocument>
      getProcessedDocumentSettings;
  private final UnaryCallSettings<
          BatchGetDocumentsMetadataRequest, BatchGetDocumentsMetadataResponse>
      batchGetDocumentsMetadataSettings;

  // Pagination descriptor for listDocuments: tells the paging machinery how to inject
  // page tokens/sizes into requests and how to pull the next token and the per-page
  // Document resources out of responses.
  private static final PagedListDescriptor<ListDocumentsRequest, ListDocumentsResponse, Document>
      LIST_DOCUMENTS_PAGE_STR_DESC =
          new PagedListDescriptor<ListDocumentsRequest, ListDocumentsResponse, Document>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListDocumentsRequest injectToken(ListDocumentsRequest payload, String token) {
              return ListDocumentsRequest.newBuilder(payload).setPageToken(token).build();
            }

            @Override
            public ListDocumentsRequest injectPageSize(ListDocumentsRequest payload, int pageSize) {
              return ListDocumentsRequest.newBuilder(payload).setPageSize(pageSize).build();
            }

            @Override
            public Integer extractPageSize(ListDocumentsRequest payload) {
              return payload.getPageSize();
            }

            @Override
            public String extractNextToken(ListDocumentsResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<Document> extractResources(ListDocumentsResponse payload) {
              return payload.getDocumentsList();
            }
          };

  // Factory that wraps a raw listDocuments future into a ListDocumentsPagedResponse,
  // using the descriptor above to drive page iteration.
  private static final PagedListResponseFactory<
          ListDocumentsRequest, ListDocumentsResponse, ListDocumentsPagedResponse>
      LIST_DOCUMENTS_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListDocumentsRequest, ListDocumentsResponse, ListDocumentsPagedResponse>() {
            @Override
            public ApiFuture<ListDocumentsPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListDocumentsRequest, ListDocumentsResponse> callable,
                ListDocumentsRequest request,
                ApiCallContext context,
                ApiFuture<ListDocumentsResponse> futureResponse) {
              PageContext<ListDocumentsRequest, ListDocumentsResponse, Document> pageContext =
                  PageContext.create(callable, LIST_DOCUMENTS_PAGE_STR_DESC, request, context);
              return ListDocumentsPagedResponse.createAsync(pageContext, futureResponse);
            }
          };

  /** Returns the object with the settings used for calls to getDocument. */
  public UnaryCallSettings<GetDocumentRequest, Document> getDocumentSettings() {
    return getDocumentSettings;
  }

  /** Returns the object with the settings used for calls to listDocuments. */
  public PagedCallSettings<ListDocumentsRequest, ListDocumentsResponse, ListDocumentsPagedResponse>
      listDocumentsSettings() {
    return listDocumentsSettings;
  }

  /** Returns the object with the settings used for calls to createDocument. */
  public UnaryCallSettings<CreateDocumentRequest, Document> createDocumentSettings() {
    return createDocumentSettings;
  }

  /** Returns the object with the settings used for calls to updateDocument. */
  public UnaryCallSettings<UpdateDocumentRequest, Document> updateDocumentSettings() {
    return updateDocumentSettings;
  }

  /** Returns the object with the settings used for calls to deleteDocument. */
  public UnaryCallSettings<DeleteDocumentRequest, Empty> deleteDocumentSettings() {
    return deleteDocumentSettings;
  }

  /** Returns the object with the settings used for calls to importDocuments. */
  public UnaryCallSettings<ImportDocumentsRequest, Operation> importDocumentsSettings() {
    return importDocumentsSettings;
  }

  /** Returns the object with the settings used for calls to importDocuments. */
  public OperationCallSettings<
          ImportDocumentsRequest, ImportDocumentsResponse, ImportDocumentsMetadata>
      importDocumentsOperationSettings() {
    return importDocumentsOperationSettings;
  }

  /** Returns the object with the settings used for calls to purgeDocuments. */
  public UnaryCallSettings<PurgeDocumentsRequest, Operation> purgeDocumentsSettings() {
    return purgeDocumentsSettings;
  }

  /** Returns the object with the settings used for calls to purgeDocuments. */
  public OperationCallSettings<
          PurgeDocumentsRequest, PurgeDocumentsResponse, PurgeDocumentsMetadata>
      purgeDocumentsOperationSettings() {
    return purgeDocumentsOperationSettings;
  }

  /** Returns the object with the settings used for calls to getProcessedDocument. */
  public UnaryCallSettings<GetProcessedDocumentRequest, ProcessedDocument>
      getProcessedDocumentSettings() {
    return getProcessedDocumentSettings;
  }

  /** Returns the object with the settings used for calls to batchGetDocumentsMetadata. */
  public UnaryCallSettings<BatchGetDocumentsMetadataRequest, BatchGetDocumentsMetadataResponse>
      batchGetDocumentsMetadataSettings() {
    return batchGetDocumentsMetadataSettings;
  }

  /**
   * Creates a transport-specific stub (gRPC or HTTP/JSON) matching the configured
   * {@link TransportChannelProvider}.
   *
   * @throws UnsupportedOperationException if the configured transport is neither gRPC nor HTTP/JSON
   */
  public DocumentServiceStub createStub() throws IOException {
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(GrpcTransportChannel.getGrpcTransportName())) {
      return GrpcDocumentServiceStub.create(this);
    }
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(HttpJsonTransportChannel.getHttpJsonTransportName())) {
      return HttpJsonDocumentServiceStub.create(this);
    }
    throw new UnsupportedOperationException(
        String.format(
            "Transport not supported: %s", getTransportChannelProvider().getTransportName()));
  }

  /** Returns the default service name. */
  @Override
  public String getServiceName() {
    return "discoveryengine";
  }

  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return InstantiatingExecutorProvider.newBuilder();
  }

  /** Returns the default service endpoint. */
  @ObsoleteApi("Use getEndpoint() instead")
  public static String getDefaultEndpoint() {
    return "discoveryengine.googleapis.com:443";
  }

  /** Returns the default mTLS service endpoint. */
  public static String getDefaultMtlsEndpoint() {
    return "discoveryengine.mtls.googleapis.com:443";
  }

  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return DEFAULT_SERVICE_SCOPES;
  }

  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return GoogleCredentialsProvider.newBuilder()
        .setScopesToApply(DEFAULT_SERVICE_SCOPES)
        .setUseJwtAccessWithScope(true);
  }

  /** Returns a builder for the default gRPC ChannelProvider for this service. */
  public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
    return InstantiatingGrpcChannelProvider.newBuilder()
        .setMaxInboundMessageSize(Integer.MAX_VALUE);
  }

  /** Returns a builder for the default REST ChannelProvider for this service. */
  @BetaApi
  public static InstantiatingHttpJsonChannelProvider.Builder
      defaultHttpJsonTransportProviderBuilder() {
    return InstantiatingHttpJsonChannelProvider.newBuilder();
  }

  /** Returns the default TransportChannelProvider for this service (gRPC). */
  public static TransportChannelProvider defaultTransportChannelProvider() {
    return defaultGrpcTransportProviderBuilder().build();
  }

  /** Returns a builder for the default gRPC client-library tracking headers. */
  public static ApiClientHeaderProvider.Builder defaultGrpcApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken(
            "gapic", GaxProperties.getLibraryVersion(DocumentServiceStubSettings.class))
        .setTransportToken(
            GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion());
  }

  /** Returns a builder for the default HTTP/JSON client-library tracking headers. */
  public static ApiClientHeaderProvider.Builder defaultHttpJsonApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken(
            "gapic", GaxProperties.getLibraryVersion(DocumentServiceStubSettings.class))
        .setTransportToken(
            GaxHttpJsonProperties.getHttpJsonTokenName(),
            GaxHttpJsonProperties.getHttpJsonVersion());
  }

  /** Returns the default API client header provider builder; delegates to the gRPC variant. */
  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return DocumentServiceStubSettings.defaultGrpcApiClientHeaderProviderBuilder();
  }

  /** Returns a new gRPC builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }

  /** Returns a new REST builder for this class. */
  public static Builder newHttpJsonBuilder() {
    return Builder.createHttpJsonDefault();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }

  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }

  /**
   * Constructs the immutable settings object by building (freezing) each of the
   * Builder's per-method call-settings builders.
   */
  protected DocumentServiceStubSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);

    getDocumentSettings = settingsBuilder.getDocumentSettings().build();
    listDocumentsSettings = settingsBuilder.listDocumentsSettings().build();
    createDocumentSettings = settingsBuilder.createDocumentSettings().build();
    updateDocumentSettings = settingsBuilder.updateDocumentSettings().build();
    deleteDocumentSettings = settingsBuilder.deleteDocumentSettings().build();
    importDocumentsSettings = settingsBuilder.importDocumentsSettings().build();
    importDocumentsOperationSettings = settingsBuilder.importDocumentsOperationSettings().build();
    purgeDocumentsSettings = settingsBuilder.purgeDocumentsSettings().build();
    purgeDocumentsOperationSettings = settingsBuilder.purgeDocumentsOperationSettings().build();
    getProcessedDocumentSettings = settingsBuilder.getProcessedDocumentSettings().build();
    batchGetDocumentsMetadataSettings = settingsBuilder.batchGetDocumentsMetadataSettings().build();
  }

  /** Builder for DocumentServiceStubSettings. */
  public static class Builder extends StubSettings.Builder<DocumentServiceStubSettings, Builder> {
    // Snapshot of every unary call-settings builder, used by applyToAllUnaryMethods.
    private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
    private final UnaryCallSettings.Builder<GetDocumentRequest, Document> getDocumentSettings;
    private final PagedCallSettings.Builder<
            ListDocumentsRequest, ListDocumentsResponse, ListDocumentsPagedResponse>
        listDocumentsSettings;
    private final UnaryCallSettings.Builder<CreateDocumentRequest, Document> createDocumentSettings;
    private final UnaryCallSettings.Builder<UpdateDocumentRequest, Document> updateDocumentSettings;
    private final UnaryCallSettings.Builder<DeleteDocumentRequest, Empty> deleteDocumentSettings;
    private final UnaryCallSettings.Builder<ImportDocumentsRequest, Operation>
        importDocumentsSettings;
    private final OperationCallSettings.Builder<
            ImportDocumentsRequest, ImportDocumentsResponse, ImportDocumentsMetadata>
        importDocumentsOperationSettings;
    private final UnaryCallSettings.Builder<PurgeDocumentsRequest, Operation>
        purgeDocumentsSettings;
    private final OperationCallSettings.Builder<
            PurgeDocumentsRequest, PurgeDocumentsResponse, PurgeDocumentsMetadata>
        purgeDocumentsOperationSettings;
    private final UnaryCallSettings.Builder<GetProcessedDocumentRequest, ProcessedDocument>
        getProcessedDocumentSettings;
    private final UnaryCallSettings.Builder<
            BatchGetDocumentsMetadataRequest, BatchGetDocumentsMetadataResponse>
        batchGetDocumentsMetadataSettings;

    // Retryable status codes keyed by retry-policy name; looked up in initDefaults.
    // Both policies here retry only on UNAVAILABLE.
    private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
        RETRYABLE_CODE_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
          ImmutableMap.builder();
      definitions.put(
          "retry_policy_1_codes",
          ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList(StatusCode.Code.UNAVAILABLE)));
      definitions.put(
          "retry_policy_2_codes",
          ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList(StatusCode.Code.UNAVAILABLE)));
      RETRYABLE_CODE_DEFINITIONS = definitions.build();
    }

    // Retry/timeout parameters keyed by retry-policy name; looked up in initDefaults.
    // Policy 1 uses 30s timeouts; policy 2 uses 300s timeouts (for importDocuments).
    private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
      RetrySettings settings = null;
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelayDuration(Duration.ofMillis(1000L))
              .setRetryDelayMultiplier(1.3)
              .setMaxRetryDelayDuration(Duration.ofMillis(10000L))
              .setInitialRpcTimeoutDuration(Duration.ofMillis(30000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeoutDuration(Duration.ofMillis(30000L))
              .setTotalTimeoutDuration(Duration.ofMillis(30000L))
              .build();
      definitions.put("retry_policy_1_params", settings);
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelayDuration(Duration.ofMillis(1000L))
              .setRetryDelayMultiplier(1.3)
              .setMaxRetryDelayDuration(Duration.ofMillis(30000L))
              .setInitialRpcTimeoutDuration(Duration.ofMillis(300000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeoutDuration(Duration.ofMillis(300000L))
              .setTotalTimeoutDuration(Duration.ofMillis(300000L))
              .build();
      definitions.put("retry_policy_2_params", settings);
      RETRY_PARAM_DEFINITIONS = definitions.build();
    }

    /** Creates a builder with no client context; defaults are still applied. */
    protected Builder() {
      this(((ClientContext) null));
    }

    /**
     * Creates a builder with fresh call-settings builders for every RPC and applies
     * the default retry configuration via initDefaults.
     */
    protected Builder(ClientContext clientContext) {
      super(clientContext);

      getDocumentSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      listDocumentsSettings = PagedCallSettings.newBuilder(LIST_DOCUMENTS_PAGE_STR_FACT);
      createDocumentSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      updateDocumentSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      deleteDocumentSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      importDocumentsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      importDocumentsOperationSettings = OperationCallSettings.newBuilder();
      purgeDocumentsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      purgeDocumentsOperationSettings = OperationCallSettings.newBuilder();
      getProcessedDocumentSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      batchGetDocumentsMetadataSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              getDocumentSettings,
              listDocumentsSettings,
              createDocumentSettings,
              updateDocumentSettings,
              deleteDocumentSettings,
              importDocumentsSettings,
              purgeDocumentsSettings,
              getProcessedDocumentSettings,
              batchGetDocumentsMetadataSettings);
      initDefaults(this);
    }

    /** Creates a builder seeded from an existing settings object (used by toBuilder). */
    protected Builder(DocumentServiceStubSettings settings) {
      super(settings);

      getDocumentSettings = settings.getDocumentSettings.toBuilder();
      listDocumentsSettings = settings.listDocumentsSettings.toBuilder();
      createDocumentSettings = settings.createDocumentSettings.toBuilder();
      updateDocumentSettings = settings.updateDocumentSettings.toBuilder();
      deleteDocumentSettings = settings.deleteDocumentSettings.toBuilder();
      importDocumentsSettings = settings.importDocumentsSettings.toBuilder();
      importDocumentsOperationSettings = settings.importDocumentsOperationSettings.toBuilder();
      purgeDocumentsSettings = settings.purgeDocumentsSettings.toBuilder();
      purgeDocumentsOperationSettings = settings.purgeDocumentsOperationSettings.toBuilder();
      getProcessedDocumentSettings = settings.getProcessedDocumentSettings.toBuilder();
      batchGetDocumentsMetadataSettings = settings.batchGetDocumentsMetadataSettings.toBuilder();

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              getDocumentSettings,
              listDocumentsSettings,
              createDocumentSettings,
              updateDocumentSettings,
              deleteDocumentSettings,
              importDocumentsSettings,
              purgeDocumentsSettings,
              getProcessedDocumentSettings,
              batchGetDocumentsMetadataSettings);
    }

    // Builds a gRPC-transport builder with default channel, credentials, headers,
    // and mTLS endpoint switching enabled.
    private static Builder createDefault() {
      Builder builder = new Builder(((ClientContext) null));

      builder.setTransportChannelProvider(defaultTransportChannelProvider());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);

      return initDefaults(builder);
    }

    // Same as createDefault but wired for the HTTP/JSON (REST) transport.
    private static Builder createHttpJsonDefault() {
      Builder builder = new Builder(((ClientContext) null));

      builder.setTransportChannelProvider(defaultHttpJsonTransportProviderBuilder().build());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultHttpJsonApiClientHeaderProviderBuilder().build());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);

      return initDefaults(builder);
    }

    // Applies default retryable codes and retry params to every unary method, and
    // configures the long-running-operation (LRO) transformers and polling algorithm
    // for importDocuments and purgeDocuments.
    private static Builder initDefaults(Builder builder) {
      builder
          .getDocumentSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params"));

      builder
          .listDocumentsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params"));

      builder
          .createDocumentSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params"));

      builder
          .updateDocumentSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params"));

      builder
          .deleteDocumentSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params"));

      builder
          .importDocumentsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_2_params"));

      builder
          .purgeDocumentsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params"));

      builder
          .getProcessedDocumentSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params"));

      builder
          .batchGetDocumentsMetadataSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params"));

      builder
          .importDocumentsOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings
                  .<ImportDocumentsRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_2_params"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(ImportDocumentsResponse.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(ImportDocumentsMetadata.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelayDuration(Duration.ofMillis(5000L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelayDuration(Duration.ofMillis(45000L))
                      .setInitialRpcTimeoutDuration(Duration.ZERO)
                      .setRpcTimeoutMultiplier(1.0)
                      .setMaxRpcTimeoutDuration(Duration.ZERO)
                      .setTotalTimeoutDuration(Duration.ofMillis(300000L))
                      .build()));

      builder
          .purgeDocumentsOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings
                  .<PurgeDocumentsRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(PurgeDocumentsResponse.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(PurgeDocumentsMetadata.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelayDuration(Duration.ofMillis(5000L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelayDuration(Duration.ofMillis(45000L))
                      .setInitialRpcTimeoutDuration(Duration.ZERO)
                      .setRpcTimeoutMultiplier(1.0)
                      .setMaxRpcTimeoutDuration(Duration.ZERO)
                      .setTotalTimeoutDuration(Duration.ofMillis(300000L))
                      .build()));

      return builder;
    }

    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
      super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
      return this;
    }

    /** Returns the list of unary call-settings builders managed by this builder. */
    public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
      return unaryMethodSettingsBuilders;
    }

    /** Returns the builder for the settings used for calls to getDocument. */
    public UnaryCallSettings.Builder<GetDocumentRequest, Document> getDocumentSettings() {
      return getDocumentSettings;
    }

    /** Returns the builder for the settings used for calls to listDocuments. */
    public PagedCallSettings.Builder<
            ListDocumentsRequest, ListDocumentsResponse, ListDocumentsPagedResponse>
        listDocumentsSettings() {
      return listDocumentsSettings;
    }

    /** Returns the builder for the settings used for calls to createDocument. */
    public UnaryCallSettings.Builder<CreateDocumentRequest, Document> createDocumentSettings() {
      return createDocumentSettings;
    }

    /** Returns the builder for the settings used for calls to updateDocument. */
    public UnaryCallSettings.Builder<UpdateDocumentRequest, Document> updateDocumentSettings() {
      return updateDocumentSettings;
    }

    /** Returns the builder for the settings used for calls to deleteDocument. */
    public UnaryCallSettings.Builder<DeleteDocumentRequest, Empty> deleteDocumentSettings() {
      return deleteDocumentSettings;
    }

    /** Returns the builder for the settings used for calls to importDocuments. */
    public UnaryCallSettings.Builder<ImportDocumentsRequest, Operation> importDocumentsSettings() {
      return importDocumentsSettings;
    }

    /** Returns the builder for the settings used for calls to importDocuments. */
    public OperationCallSettings.Builder<
            ImportDocumentsRequest, ImportDocumentsResponse, ImportDocumentsMetadata>
        importDocumentsOperationSettings() {
      return importDocumentsOperationSettings;
    }

    /** Returns the builder for the settings used for calls to purgeDocuments. */
    public UnaryCallSettings.Builder<PurgeDocumentsRequest, Operation> purgeDocumentsSettings() {
      return purgeDocumentsSettings;
    }

    /** Returns the builder for the settings used for calls to purgeDocuments. */
    public OperationCallSettings.Builder<
            PurgeDocumentsRequest, PurgeDocumentsResponse, PurgeDocumentsMetadata>
        purgeDocumentsOperationSettings() {
      return purgeDocumentsOperationSettings;
    }

    /** Returns the builder for the settings used for calls to getProcessedDocument. */
    public UnaryCallSettings.Builder<GetProcessedDocumentRequest, ProcessedDocument>
        getProcessedDocumentSettings() {
      return getProcessedDocumentSettings;
    }

    /** Returns the builder for the settings used for calls to batchGetDocumentsMetadata. */
    public UnaryCallSettings.Builder<
            BatchGetDocumentsMetadataRequest, BatchGetDocumentsMetadataResponse>
        batchGetDocumentsMetadataSettings() {
      return batchGetDocumentsMetadataSettings;
    }

    /** Builds the immutable settings object; see the outer-class constructor. */
    @Override
    public DocumentServiceStubSettings build() throws IOException {
      return new DocumentServiceStubSettings(this);
    }
  }
}
|
apache/geode | 34,779 | geode-core/src/main/java/org/apache/geode/internal/cache/InternalCacheForClientAccess.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geode.internal.cache;
import static org.apache.geode.util.internal.UncheckedUtils.uncheckedCast;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
import javax.naming.Context;
import javax.transaction.TransactionManager;
import io.micrometer.core.instrument.MeterRegistry;
import org.apache.geode.CancelCriterion;
import org.apache.geode.LogWriter;
import org.apache.geode.annotations.VisibleForTesting;
import org.apache.geode.cache.Cache;
import org.apache.geode.cache.CacheClosedException;
import org.apache.geode.cache.CacheTransactionManager;
import org.apache.geode.cache.CacheWriterException;
import org.apache.geode.cache.Declarable;
import org.apache.geode.cache.DiskStore;
import org.apache.geode.cache.DiskStoreFactory;
import org.apache.geode.cache.DynamicRegionFactory;
import org.apache.geode.cache.GatewayException;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.RegionAttributes;
import org.apache.geode.cache.RegionExistsException;
import org.apache.geode.cache.RegionFactory;
import org.apache.geode.cache.RegionShortcut;
import org.apache.geode.cache.TimeoutException;
import org.apache.geode.cache.asyncqueue.AsyncEventQueue;
import org.apache.geode.cache.asyncqueue.AsyncEventQueueFactory;
import org.apache.geode.cache.asyncqueue.internal.AsyncEventQueueImpl;
import org.apache.geode.cache.client.internal.ClientMetadataService;
import org.apache.geode.cache.control.ResourceManager;
import org.apache.geode.cache.query.QueryService;
import org.apache.geode.cache.query.internal.InternalQueryService;
import org.apache.geode.cache.query.internal.QueryMonitor;
import org.apache.geode.cache.query.internal.cq.CqService;
import org.apache.geode.cache.server.CacheServer;
import org.apache.geode.cache.snapshot.CacheSnapshotService;
import org.apache.geode.cache.util.GatewayConflictResolver;
import org.apache.geode.cache.wan.GatewayReceiver;
import org.apache.geode.cache.wan.GatewayReceiverFactory;
import org.apache.geode.cache.wan.GatewaySender;
import org.apache.geode.cache.wan.GatewaySenderFactory;
import org.apache.geode.distributed.DistributedLockService;
import org.apache.geode.distributed.DistributedMember;
import org.apache.geode.distributed.DistributedSystem;
import org.apache.geode.distributed.internal.DistributionAdvisor;
import org.apache.geode.distributed.internal.DistributionManager;
import org.apache.geode.distributed.internal.InternalDistributedSystem;
import org.apache.geode.distributed.internal.membership.InternalDistributedMember;
import org.apache.geode.i18n.LogWriterI18n;
import org.apache.geode.internal.SystemTimer;
import org.apache.geode.internal.admin.ClientHealthMonitoringRegion;
import org.apache.geode.internal.cache.InitialImageOperation.Entry;
import org.apache.geode.internal.cache.backup.BackupService;
import org.apache.geode.internal.cache.control.InternalResourceManager;
import org.apache.geode.internal.cache.control.ResourceAdvisor;
import org.apache.geode.internal.cache.event.EventTrackerExpiryTask;
import org.apache.geode.internal.cache.eviction.HeapEvictor;
import org.apache.geode.internal.cache.eviction.OffHeapEvictor;
import org.apache.geode.internal.cache.extension.ExtensionPoint;
import org.apache.geode.internal.cache.persistence.PersistentMemberManager;
import org.apache.geode.internal.cache.tier.sockets.CacheClientNotifier;
import org.apache.geode.internal.cache.tier.sockets.ClientProxyMembershipID;
import org.apache.geode.internal.logging.InternalLogWriter;
import org.apache.geode.internal.offheap.MemoryAllocator;
import org.apache.geode.internal.security.SecurityService;
import org.apache.geode.internal.statistics.StatisticsClock;
import org.apache.geode.management.internal.JmxManagerAdvisor;
import org.apache.geode.management.internal.RestAgent;
import org.apache.geode.pdx.JSONFormatter;
import org.apache.geode.pdx.PdxInstance;
import org.apache.geode.pdx.PdxInstanceFactory;
import org.apache.geode.pdx.PdxSerializer;
import org.apache.geode.pdx.internal.TypeRegistry;
import org.apache.geode.security.NotAuthorizedException;
/**
 * This class delegates all methods to the InternalCache instance it wraps.
 * Any region returned is checked first; if it is an internal region a
 * {@link NotAuthorizedException} is thrown instead of returning it.
 *
 * <p>
 * Note: an instance of this class should be used by servers that
 * process requests from clients that contain region names to prevent
 * the client from directly accessing internal regions.
 */
public class InternalCacheForClientAccess implements InternalCache {
  // The real cache; every call is forwarded here, with region results vetted.
  private final InternalCache delegate;
  public InternalCacheForClientAccess(InternalCache delegate) {
    this.delegate = delegate;
  }
  /**
   * Rejects internal regions that clients must never see.
   *
   * <p>
   * Two internal regions are explicitly permitted because clients access
   * them by name: the dynamic-region bookkeeping region and the
   * client-health admin region.
   *
   * @param r the region to vet; {@code null} is allowed and ignored
   * @throws NotAuthorizedException if {@code r} is a disallowed internal region
   */
  private void checkForInternalRegion(Region<?, ?> r) {
    if (r == null) {
      return;
    }
    InternalRegion ir = (InternalRegion) r;
    if (ir.isInternalRegion()
        && !r.getName().equals(DynamicRegionFactory.DYNAMIC_REGION_LIST_NAME)
        && !r.getName().equals(ClientHealthMonitoringRegion.ADMIN_REGION_NAME)) {
      throw new NotAuthorizedException("The region " + r.getName()
          + " is an internal region that a client is never allowed to access");
    }
  }
  /**
   * Applies {@link #checkForInternalRegion(Region)} to every element.
   * The bounded wildcard accepts any collection of regions without an
   * unchecked cast.
   */
  private void checkSetOfRegions(Collection<? extends Region<?, ?>> regions) {
    for (Region<?, ?> r : regions) {
      checkForInternalRegion(r);
    }
  }
  @Override
  public <K, V> Region<K, V> getRegion(String path) {
    Region<K, V> result = delegate.getRegion(path);
    checkForInternalRegion(result);
    return result;
  }
  /**
   * This method can be used to locate an internal region.
   * It should not be invoked with a region name obtained
   * from a client.
   */
  public <K, V> Region<K, V> getInternalRegion(String path) {
    // deliberately unchecked: server-side callers may need internal regions
    return delegate.getRegion(path);
  }
  @Override
  public <K, V> Region<K, V> getRegion(String path, boolean returnDestroyedRegion) {
    Region<?, ?> result = delegate.getRegion(path, returnDestroyedRegion);
    checkForInternalRegion(result);
    return uncheckedCast(result);
  }
  @Override
  public InternalRegion getReinitializingRegion(String fullPath) {
    InternalRegion result = delegate.getReinitializingRegion(fullPath);
    checkForInternalRegion(result);
    return result;
  }
  @Override
  public <K, V> Region<K, V> getRegionByPath(String path) {
    InternalRegion result = delegate.getInternalRegionByPath(path);
    checkForInternalRegion(result);
    return uncheckedCast(result);
  }
  @Override
  public InternalRegion getInternalRegionByPath(String path) {
    InternalRegion result = delegate.getInternalRegionByPath(path);
    checkForInternalRegion(result);
    return result;
  }
  @Override
  public InternalRegion getRegionByPathForProcessing(String path) {
    InternalRegion result = delegate.getRegionByPathForProcessing(path);
    checkForInternalRegion(result);
    return result;
  }
  @Override
  public DistributedRegion getRegionInDestroy(String path) {
    DistributedRegion result = delegate.getRegionInDestroy(path);
    checkForInternalRegion(result);
    return result;
  }
  @Override
  public Set<PartitionedRegion> getPartitionedRegions() {
    Set<PartitionedRegion> result = delegate.getPartitionedRegions();
    checkSetOfRegions(result);
    return result;
  }
  @Override
  public Set<Region<?, ?>> rootRegions() {
    Set<Region<?, ?>> result = delegate.rootRegions();
    checkSetOfRegions(result);
    return result;
  }
  @Override
  public Set<Region<?, ?>> rootRegions(boolean includePRAdminRegions) {
    Set<Region<?, ?>> result = delegate.rootRegions(includePRAdminRegions);
    checkSetOfRegions(result);
    return result;
  }
  @Override
  public Set<InternalRegion> getAllRegions() {
    Set<InternalRegion> result = delegate.getAllRegions();
    checkSetOfRegions(result);
    return result;
  }
  /**
   * Creates a region on behalf of a client, refusing any request that asks
   * for an internal/meta/bucket/gateway-queue region.
   */
  @Override
  public <K, V> Region<K, V> createVMRegion(String name, RegionAttributes<K, V> p_attrs,
      InternalRegionArguments internalRegionArgs)
      throws RegionExistsException, TimeoutException, IOException, ClassNotFoundException {
    if (internalRegionArgs != null) {
      if (internalRegionArgs.isInternalRegion()
          || internalRegionArgs.isUsedForPartitionedRegionBucket()
          || internalRegionArgs.isUsedForMetaRegion()
          || internalRegionArgs.isUsedForSerialGatewaySenderQueue()
          || internalRegionArgs.isUsedForParallelGatewaySenderQueue()) {
        throw new NotAuthorizedException("The region " + name
            + " is an internal region that a client is never allowed to create");
      }
    }
    return delegate.createVMRegion(name, p_attrs, internalRegionArgs);
  }
  /**
   * This method allows server-side code to create an internal region. It should
   * not be invoked with a region name obtained from a client.
   */
  public <K, V> Region<K, V> createInternalRegion(String name, RegionAttributes<K, V> p_attrs,
      InternalRegionArguments internalRegionArgs)
      throws RegionExistsException, TimeoutException, IOException, ClassNotFoundException {
    return delegate.createVMRegion(name, p_attrs, internalRegionArgs);
  }
  @Override
  public Cache getReconnectedCache() {
    Cache reconnectedCache = delegate.getReconnectedCache();
    if (reconnectedCache != null) {
      // re-wrap so the reconnected cache keeps the same client-access checks
      return new InternalCacheForClientAccess((InternalCache) reconnectedCache);
    }
    return null;
  }
  @Override
  public FilterProfile getFilterProfile(String regionName) {
    // getRegion applies the internal-region check before we touch the profile
    InternalRegion r = (InternalRegion) getRegion(regionName, true);
    if (r != null) {
      return r.getFilterProfile();
    }
    return null;
  }
  @Override
  public <K, V> Region<K, V> basicCreateRegion(String name, RegionAttributes<K, V> attrs)
      throws RegionExistsException, TimeoutException {
    return delegate.basicCreateRegion(name, attrs);
  }
  @Override
  public <K, V> Region<K, V> createVMRegion(String name, RegionAttributes<K, V> aRegionAttributes)
      throws RegionExistsException, TimeoutException {
    return delegate.createVMRegion(name, aRegionAttributes);
  }
  @Override
  public <K, V> Region<K, V> createRegion(String name, RegionAttributes<K, V> aRegionAttributes)
      throws RegionExistsException, TimeoutException {
    return delegate.createRegion(name, aRegionAttributes);
  }
  @Override
  public <K, V> RegionFactory<K, V> createRegionFactory() {
    return delegate.createRegionFactory();
  }
  @Override
  public <K, V> RegionFactory<K, V> createRegionFactory(RegionShortcut shortcut) {
    return delegate.createRegionFactory(shortcut);
  }
  @Override
  public <K, V> RegionFactory<K, V> createRegionFactory(String regionAttributesId) {
    return delegate.createRegionFactory(regionAttributesId);
  }
  @Override
  public <K, V> RegionFactory<K, V> createRegionFactory(RegionAttributes<K, V> regionAttributes) {
    return delegate.createRegionFactory(regionAttributes);
  }
  @Override
  public void close(boolean keepAlive) {
    delegate.close(keepAlive);
  }
  @Override
  public LogWriterI18n getLoggerI18n() {
    return delegate.getLoggerI18n();
  }
  @Override
  public LogWriterI18n getSecurityLoggerI18n() {
    return delegate.getSecurityLoggerI18n();
  }
  @Override
  public int getLockTimeout() {
    return delegate.getLockTimeout();
  }
  @Override
  public void setLockTimeout(int seconds) {
    delegate.setLockTimeout(seconds);
  }
  @Override
  public int getMessageSyncInterval() {
    return delegate.getMessageSyncInterval();
  }
  @Override
  public void setMessageSyncInterval(int seconds) {
    delegate.setMessageSyncInterval(seconds);
  }
  @Override
  public int getLockLease() {
    return delegate.getLockLease();
  }
  @Override
  public void setLockLease(int seconds) {
    delegate.setLockLease(seconds);
  }
  @Override
  public int getSearchTimeout() {
    return delegate.getSearchTimeout();
  }
  @Override
  public void setSearchTimeout(int seconds) {
    delegate.setSearchTimeout(seconds);
  }
  @Override
  public CacheServer addCacheServer() {
    return delegate.addCacheServer();
  }
  @Override
  public List<CacheServer> getCacheServers() {
    return delegate.getCacheServers();
  }
  @Override
  public void setGatewayConflictResolver(GatewayConflictResolver resolver) {
    delegate.setGatewayConflictResolver(resolver);
  }
  @Override
  public GatewayConflictResolver getGatewayConflictResolver() {
    return delegate.getGatewayConflictResolver();
  }
  @Override
  public void setIsServer(boolean isServer) {
    delegate.setIsServer(isServer);
  }
  @Override
  public boolean isServer() {
    return delegate.isServer();
  }
  @Override
  public void readyForEvents() {
    delegate.readyForEvents();
  }
  @Override
  public GatewaySenderFactory createGatewaySenderFactory() {
    return delegate.createGatewaySenderFactory();
  }
  @Override
  public AsyncEventQueueFactory createAsyncEventQueueFactory() {
    return delegate.createAsyncEventQueueFactory();
  }
  @Override
  public GatewayReceiverFactory createGatewayReceiverFactory() {
    return delegate.createGatewayReceiverFactory();
  }
  @Override
  public Set<GatewaySender> getGatewaySenders() {
    return delegate.getGatewaySenders();
  }
  @Override
  public GatewaySender getGatewaySender(String id) {
    return delegate.getGatewaySender(id);
  }
  @Override
  public Set<GatewayReceiver> getGatewayReceivers() {
    return delegate.getGatewayReceivers();
  }
  @Override
  public Set<AsyncEventQueue> getAsyncEventQueues() {
    return delegate.getAsyncEventQueues();
  }
  @Override
  public AsyncEventQueue getAsyncEventQueue(String id) {
    return delegate.getAsyncEventQueue(id);
  }
  @Override
  public Set<DistributedMember> getMembers() {
    return delegate.getMembers();
  }
  @Override
  public Set<DistributedMember> getAdminMembers() {
    return delegate.getAdminMembers();
  }
  @Override
  public Set<DistributedMember> getMembers(Region region) {
    return delegate.getMembers(region);
  }
  @Override
  public CacheSnapshotService getSnapshotService() {
    return delegate.getSnapshotService();
  }
  @Override
  public boolean isReconnecting() {
    return delegate.isReconnecting();
  }
  @Override
  public boolean waitUntilReconnected(long time, TimeUnit units) throws InterruptedException {
    return delegate.waitUntilReconnected(time, units);
  }
  @Override
  public void stopReconnecting() {
    delegate.stopReconnecting();
  }
  @Override
  public String getName() {
    return delegate.getName();
  }
  @Override
  public DistributedSystem getDistributedSystem() {
    return delegate.getDistributedSystem();
  }
  @Override
  public ResourceManager getResourceManager() {
    return delegate.getResourceManager();
  }
  @Override
  public void setCopyOnRead(boolean copyOnRead) {
    delegate.setCopyOnRead(copyOnRead);
  }
  @Override
  public boolean getCopyOnRead() {
    return delegate.getCopyOnRead();
  }
  @Override
  public <K, V> RegionAttributes<K, V> getRegionAttributes(String id) {
    return delegate.getRegionAttributes(id);
  }
  @Override
  public <K, V> void setRegionAttributes(String id, RegionAttributes<K, V> attrs) {
    delegate.setRegionAttributes(id, attrs);
  }
  @Override
  public <K, V> Map<String, RegionAttributes<K, V>> listRegionAttributes() {
    return delegate.listRegionAttributes();
  }
  @Override
  public void loadCacheXml(InputStream is)
      throws TimeoutException, CacheWriterException, GatewayException, RegionExistsException {
    delegate.loadCacheXml(is);
  }
  @Override
  public LogWriter getLogger() {
    return delegate.getLogger();
  }
  @Override
  public LogWriter getSecurityLogger() {
    return delegate.getSecurityLogger();
  }
  @Override
  public DiskStore findDiskStore(String name) {
    return delegate.findDiskStore(name);
  }
  @Override
  public DiskStoreFactory createDiskStoreFactory() {
    return delegate.createDiskStoreFactory();
  }
  @Override
  public boolean getPdxReadSerialized() {
    return delegate.getPdxReadSerialized();
  }
  @Override
  public PdxSerializer getPdxSerializer() {
    return delegate.getPdxSerializer();
  }
  @Override
  public String getPdxDiskStore() {
    return delegate.getPdxDiskStore();
  }
  @Override
  public boolean getPdxPersistent() {
    return delegate.getPdxPersistent();
  }
  @Override
  public boolean getPdxIgnoreUnreadFields() {
    return delegate.getPdxIgnoreUnreadFields();
  }
  @Override
  public void registerPdxMetaData(Object instance) {
    delegate.registerPdxMetaData(instance);
  }
  @Override
  public CacheTransactionManager getCacheTransactionManager() {
    return delegate.getCacheTransactionManager();
  }
  @Override
  public Context getJNDIContext() {
    return delegate.getJNDIContext();
  }
  @Override
  public Declarable getInitializer() {
    return delegate.getInitializer();
  }
  @Override
  public Properties getInitializerProps() {
    return delegate.getInitializerProps();
  }
  @Override
  public CancelCriterion getCancelCriterion() {
    return delegate.getCancelCriterion();
  }
  @Override
  public PdxInstanceFactory createPdxInstanceFactory(String className) {
    return delegate.createPdxInstanceFactory(className);
  }
  @Override
  public PdxInstance createPdxEnum(String className, String enumName, int enumOrdinal) {
    return delegate.createPdxEnum(className, enumName, enumOrdinal);
  }
  @Override
  public void close() {
    delegate.close();
  }
  @Override
  public boolean isClosed() {
    return delegate.isClosed();
  }
  @Override
  public ExtensionPoint<Cache> getExtensionPoint() {
    return delegate.getExtensionPoint();
  }
  @Override
  public InternalDistributedMember getMyId() {
    return delegate.getMyId();
  }
  @Override
  public Collection<DiskStore> listDiskStores() {
    return delegate.listDiskStores();
  }
  @Override
  public Collection<DiskStore> listDiskStoresIncludingRegionOwned() {
    return delegate.listDiskStoresIncludingRegionOwned();
  }
  @Override
  public CqService getCqService() {
    return delegate.getCqService();
  }
  @Override
  public <T extends CacheService> T getService(Class<T> clazz) {
    return delegate.getService(clazz);
  }
  @Override
  public <T extends CacheService> Optional<T> getOptionalService(Class<T> clazz) {
    return Optional.ofNullable(getService(clazz));
  }
  @Override
  public Collection<CacheService> getServices() {
    return delegate.getServices();
  }
  @Override
  public SystemTimer getCCPTimer() {
    return delegate.getCCPTimer();
  }
  @Override
  public void cleanupForClient(CacheClientNotifier ccn, ClientProxyMembershipID client) {
    delegate.cleanupForClient(ccn, client);
  }
  @Override
  public void purgeCCPTimer() {
    delegate.purgeCCPTimer();
  }
  @Override
  public MemoryAllocator getOffHeapStore() {
    return delegate.getOffHeapStore();
  }
  @Override
  public DistributedLockService getPartitionedRegionLockService() {
    return delegate.getPartitionedRegionLockService();
  }
  @Override
  public PersistentMemberManager getPersistentMemberManager() {
    return delegate.getPersistentMemberManager();
  }
  @Override
  public Set<GatewaySender> getAllGatewaySenders() {
    return delegate.getAllGatewaySenders();
  }
  @Override
  public CachePerfStats getCachePerfStats() {
    return delegate.getCachePerfStats();
  }
  @Override
  public DistributionManager getDistributionManager() {
    return delegate.getDistributionManager();
  }
  @Override
  public void regionReinitialized(Region region) {
    delegate.regionReinitialized(region);
  }
  @Override
  public void setRegionByPath(String path, InternalRegion r) {
    delegate.setRegionByPath(path, r);
  }
  @Override
  public InternalResourceManager getInternalResourceManager() {
    return delegate.getInternalResourceManager();
  }
  @Override
  public ResourceAdvisor getResourceAdvisor() {
    return delegate.getResourceAdvisor();
  }
  @Override
  public boolean isCacheAtShutdownAll() {
    return delegate.isCacheAtShutdownAll();
  }
  @Override
  public boolean requiresNotificationFromPR(PartitionedRegion r) {
    return delegate.requiresNotificationFromPR(r);
  }
  @Override
  public <K, V> RegionAttributes<K, V> invokeRegionBefore(InternalRegion parent, String name,
      RegionAttributes<K, V> attrs, InternalRegionArguments internalRegionArgs) {
    return delegate.invokeRegionBefore(parent, name, attrs, internalRegionArgs);
  }
  @Override
  public void invokeRegionAfter(InternalRegion region) {
    delegate.invokeRegionAfter(region);
  }
  @Override
  public void invokeBeforeDestroyed(InternalRegion region) {
    delegate.invokeBeforeDestroyed(region);
  }
  @Override
  public void invokeCleanupFailedInitialization(InternalRegion region) {
    delegate.invokeCleanupFailedInitialization(region);
  }
  @Override
  public TXManagerImpl getTXMgr() {
    return delegate.getTXMgr();
  }
  @Override
  public boolean forcedDisconnect() {
    return delegate.forcedDisconnect();
  }
  @Override
  public InternalResourceManager getInternalResourceManager(boolean checkCancellationInProgress) {
    // Forward the flag; it was previously dropped (the no-arg overload was
    // called), which silently skipped the caller's cancellation check.
    return delegate.getInternalResourceManager(checkCancellationInProgress);
  }
  @Override
  public boolean isCopyOnRead() {
    return delegate.isCopyOnRead();
  }
  @Override
  public TombstoneService getTombstoneService() {
    return delegate.getTombstoneService();
  }
  @Override
  public QueryService getLocalQueryService() {
    return delegate.getLocalQueryService();
  }
  @Override
  public void registerInterestStarted() {
    delegate.registerInterestStarted();
  }
  @Override
  public void registerInterestCompleted() {
    delegate.registerInterestCompleted();
  }
  @Override
  public void regionReinitializing(String fullPath) {
    delegate.regionReinitializing(fullPath);
  }
  @Override
  public void unregisterReinitializingRegion(String fullPath) {
    delegate.unregisterReinitializingRegion(fullPath);
  }
  @Override
  public boolean removeRoot(InternalRegion rootRgn) {
    return delegate.removeRoot(rootRgn);
  }
  @Override
  public Executor getEventThreadPool() {
    return delegate.getEventThreadPool();
  }
  @Override
  public boolean keepDurableSubscriptionsAlive() {
    return delegate.keepDurableSubscriptionsAlive();
  }
  @Override
  public CacheClosedException getCacheClosedException(String reason) {
    return delegate.getCacheClosedException(reason);
  }
  @Override
  public CacheClosedException getCacheClosedException(String reason, Throwable cause) {
    return delegate.getCacheClosedException(reason, cause);
  }
  @Override
  public TypeRegistry getPdxRegistry() {
    return delegate.getPdxRegistry();
  }
  @Override
  public DiskStoreImpl getOrCreateDefaultDiskStore() {
    return delegate.getOrCreateDefaultDiskStore();
  }
  @Override
  public ExpirationScheduler getExpirationScheduler() {
    return delegate.getExpirationScheduler();
  }
  @Override
  public TransactionManager getJTATransactionManager() {
    return delegate.getJTATransactionManager();
  }
  @Override
  public TXManagerImpl getTxManager() {
    return delegate.getTxManager();
  }
  @Override
  public void beginDestroy(String path, DistributedRegion region) {
    delegate.beginDestroy(path, region);
  }
  @Override
  public void endDestroy(String path, DistributedRegion region) {
    delegate.endDestroy(path, region);
  }
  @Override
  public ClientMetadataService getClientMetadataService() {
    return delegate.getClientMetadataService();
  }
  @Override
  public long cacheTimeMillis() {
    return delegate.cacheTimeMillis();
  }
  @Override
  public URL getCacheXmlURL() {
    return delegate.getCacheXmlURL();
  }
  @Override
  public List<File> getBackupFiles() {
    return delegate.getBackupFiles();
  }
  @Override
  public boolean isClient() {
    return delegate.isClient();
  }
  @Override
  public InternalDistributedSystem getInternalDistributedSystem() {
    return delegate.getInternalDistributedSystem();
  }
  @Override
  public void addRegionListener(RegionListener regionListener) {
    delegate.addRegionListener(regionListener);
  }
  @Override
  public void removeRegionListener(RegionListener regionListener) {
    delegate.removeRegionListener(regionListener);
  }
  @Override
  public Set<RegionListener> getRegionListeners() {
    return delegate.getRegionListeners();
  }
  @Override
  public CacheConfig getCacheConfig() {
    return delegate.getCacheConfig();
  }
  @Override
  public boolean getPdxReadSerializedByAnyGemFireServices() {
    return delegate.getPdxReadSerializedByAnyGemFireServices();
  }
  @Override
  public void setDeclarativeCacheConfig(CacheConfig cacheConfig) {
    delegate.setDeclarativeCacheConfig(cacheConfig);
  }
  @Override
  public void initializePdxRegistry() {
    delegate.initializePdxRegistry();
  }
  @Override
  public void readyDynamicRegionFactory() {
    delegate.readyDynamicRegionFactory();
  }
  @Override
  public void setBackupFiles(List<File> backups) {
    delegate.setBackupFiles(backups);
  }
  @Override
  public void addDeclarableProperties(Map<Declarable, Properties> mapOfNewDeclarableProps) {
    delegate.addDeclarableProperties(mapOfNewDeclarableProps);
  }
  @Override
  public void setInitializer(Declarable initializer, Properties initializerProps) {
    delegate.setInitializer(initializer, initializerProps);
  }
  @Override
  public boolean hasPool() {
    return delegate.hasPool();
  }
  @Override
  public DiskStoreFactory createDiskStoreFactory(DiskStoreAttributes attrs) {
    return delegate.createDiskStoreFactory(attrs);
  }
  @Override
  public BackupService getBackupService() {
    return delegate.getBackupService();
  }
  @Override
  @VisibleForTesting
  public Throwable getDisconnectCause() {
    return delegate.getDisconnectCause();
  }
  @Override
  public void addPartitionedRegion(PartitionedRegion region) {
    delegate.addPartitionedRegion(region);
  }
  @Override
  public void removePartitionedRegion(PartitionedRegion region) {
    delegate.removePartitionedRegion(region);
  }
  @Override
  public void addDiskStore(DiskStoreImpl dsi) {
    delegate.addDiskStore(dsi);
  }
  @Override
  public TXEntryStateFactory getTXEntryStateFactory() {
    return delegate.getTXEntryStateFactory();
  }
  @Override
  public EventTrackerExpiryTask getEventTrackerTask() {
    return delegate.getEventTrackerTask();
  }
  @Override
  public void removeDiskStore(DiskStoreImpl diskStore) {
    delegate.removeDiskStore(diskStore);
  }
  @Override
  public void addGatewaySender(GatewaySender sender) {
    delegate.addGatewaySender(sender);
  }
  @Override
  public void addAsyncEventQueue(AsyncEventQueueImpl asyncQueue) {
    delegate.addAsyncEventQueue(asyncQueue);
  }
  @Override
  public void removeAsyncEventQueue(AsyncEventQueue asyncQueue) {
    delegate.removeAsyncEventQueue(asyncQueue);
  }
  @Override
  public QueryMonitor getQueryMonitor() {
    return delegate.getQueryMonitor();
  }
  @Override
  public void close(String reason, Throwable systemFailureCause, boolean keepAlive, boolean keepDS,
      boolean skipAwait) {
    delegate.close(reason, systemFailureCause, keepAlive, keepDS, skipAwait);
  }
  @Override
  public JmxManagerAdvisor getJmxManagerAdvisor() {
    return delegate.getJmxManagerAdvisor();
  }
  @Override
  public List<Properties> getDeclarableProperties(String className) {
    return delegate.getDeclarableProperties(className);
  }
  @Override
  public long getUpTime() {
    return delegate.getUpTime();
  }
  @Override
  public void addRegionOwnedDiskStore(DiskStoreImpl dsi) {
    delegate.addRegionOwnedDiskStore(dsi);
  }
  @Override
  public DiskStoreMonitor getDiskStoreMonitor() {
    return delegate.getDiskStoreMonitor();
  }
  @Override
  public void close(String reason, Throwable optionalCause) {
    delegate.close(reason, optionalCause);
  }
  @Override
  public List<InternalCacheServer> getCacheServersAndGatewayReceiver() {
    return delegate.getCacheServersAndGatewayReceiver();
  }
  @Override
  public boolean isGlobalRegionInitializing(String fullPath) {
    return delegate.isGlobalRegionInitializing(fullPath);
  }
  @Override
  public DistributionAdvisor getDistributionAdvisor() {
    return delegate.getDistributionAdvisor();
  }
  @Override
  public boolean isQueryMonitorDisabledForLowMemory() {
    return delegate.isQueryMonitorDisabledForLowMemory();
  }
  @Override
  public boolean isRESTServiceRunning() {
    return delegate.isRESTServiceRunning();
  }
  @Override
  public InternalLogWriter getInternalLogWriter() {
    return delegate.getInternalLogWriter();
  }
  @Override
  public InternalLogWriter getSecurityInternalLogWriter() {
    return delegate.getSecurityInternalLogWriter();
  }
  @Override
  public Set<InternalRegion> getApplicationRegions() {
    // application regions are by definition not internal; no vetting needed
    return delegate.getApplicationRegions();
  }
  @Override
  public void removeGatewaySender(GatewaySender sender) {
    delegate.removeGatewaySender(sender);
  }
  @Override
  public DistributedLockService getGatewaySenderLockService() {
    return delegate.getGatewaySenderLockService();
  }
  @Override
  @VisibleForTesting
  public RestAgent getRestAgent() {
    return delegate.getRestAgent();
  }
  @Override
  public Properties getDeclarableProperties(Declarable declarable) {
    return delegate.getDeclarableProperties(declarable);
  }
  @Override
  public void setRESTServiceRunning(boolean isRESTServiceRunning) {
    delegate.setRESTServiceRunning(isRESTServiceRunning);
  }
  @Override
  public void close(String reason, boolean keepAlive, boolean keepDS) {
    delegate.close(reason, keepAlive, keepDS);
  }
  @Override
  public void addGatewayReceiver(GatewayReceiver receiver) {
    delegate.addGatewayReceiver(receiver);
  }
  @Override
  public void removeGatewayReceiver(GatewayReceiver receiver) {
    delegate.removeGatewayReceiver(receiver);
  }
  @Override
  public InternalCacheServer addGatewayReceiverServer(GatewayReceiver receiver) {
    return delegate.addGatewayReceiverServer(receiver);
  }
  @Override
  @VisibleForTesting
  public boolean removeCacheServer(CacheServer cacheServer) {
    return delegate.removeCacheServer(cacheServer);
  }
  @Override
  public boolean removeGatewayReceiverServer(InternalCacheServer receiverServer) {
    return delegate.removeGatewayReceiverServer(receiverServer);
  }
  @Override
  @VisibleForTesting
  public void setReadSerializedForTest(boolean value) {
    delegate.setReadSerializedForTest(value);
  }
  @Override
  public void setReadSerializedForCurrentThread(boolean value) {
    delegate.setReadSerializedForCurrentThread(value);
  }
  @Override
  public PdxInstanceFactory createPdxInstanceFactory(String className, boolean expectDomainClass) {
    return delegate.createPdxInstanceFactory(className, expectDomainClass);
  }
  @Override
  public void waitForRegisterInterestsInProgress() {
    delegate.waitForRegisterInterestsInProgress();
  }
  @Override
  public void reLoadClusterConfiguration() throws IOException, ClassNotFoundException {
    delegate.reLoadClusterConfiguration();
  }
  @Override
  public SecurityService getSecurityService() {
    return delegate.getSecurityService();
  }
  @Override
  public boolean hasPersistentRegion() {
    return delegate.hasPersistentRegion();
  }
  @Override
  public void shutDownAll() {
    delegate.shutDownAll();
  }
  @Override
  public void invokeRegionEntrySynchronizationListenersAfterSynchronization(
      InternalDistributedMember sender, InternalRegion region, List<Entry> entriesToSynchronize) {
    delegate.invokeRegionEntrySynchronizationListenersAfterSynchronization(sender, region,
        entriesToSynchronize);
  }
  @Override
  public QueryService getQueryService() {
    return delegate.getQueryService();
  }
  @Override
  public InternalQueryService getInternalQueryService() {
    return delegate.getInternalQueryService();
  }
  @Override
  public void lockDiskStore(String diskStoreName) {
    // Forward so lock/unlock pairs issued through this wrapper take effect;
    // the bodies were previously empty, silently dropping the operations.
    delegate.lockDiskStore(diskStoreName);
  }
  @Override
  public void unlockDiskStore(String diskStoreName) {
    delegate.unlockDiskStore(diskStoreName);
  }
  @Override
  public JSONFormatter getJsonFormatter() {
    return delegate.getJsonFormatter();
  }
  @Override
  @VisibleForTesting
  public Set<AsyncEventQueue> getAsyncEventQueues(boolean visibleOnly) {
    return delegate.getAsyncEventQueues(visibleOnly);
  }
  @Override
  @VisibleForTesting
  public void closeDiskStores() {
    delegate.closeDiskStores();
  }
  @Override
  public Object convertPdxInstanceIfNeeded(Object obj, boolean preferCD) {
    return delegate.convertPdxInstanceIfNeeded(obj, preferCD);
  }
  @Override
  public Boolean getPdxReadSerializedOverride() {
    return delegate.getPdxReadSerializedOverride();
  }
  @Override
  public void setPdxReadSerializedOverride(boolean pdxReadSerialized) {
    delegate.setPdxReadSerializedOverride(pdxReadSerialized);
  }
  @Override
  public InternalCacheForClientAccess getCacheForProcessingClientRequests() {
    // this instance is already the client-safe wrapper
    return this;
  }
  @Override
  public void initialize() {
    // nothing to do: the wrapped cache is initialized by whoever created it
  }
  @Override
  public void throwCacheExistsException() {
    delegate.throwCacheExistsException();
  }
  @Override
  public MeterRegistry getMeterRegistry() {
    return delegate.getMeterRegistry();
  }
  @Override
  public void saveCacheXmlForReconnect() {
    delegate.saveCacheXmlForReconnect();
  }
  @Override
  @VisibleForTesting
  public HeapEvictor getHeapEvictor() {
    return delegate.getHeapEvictor();
  }
  @Override
  @VisibleForTesting
  public OffHeapEvictor getOffHeapEvictor() {
    return delegate.getOffHeapEvictor();
  }
  @Override
  public StatisticsClock getStatisticsClock() {
    return delegate.getStatisticsClock();
  }
}
|
apache/hadoop | 34,943 | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.client.api.impl;
import java.io.Closeable;
import java.io.Flushable;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URI;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Timer;
import java.util.TimerTask;
import java.util.Map.Entry;
import java.util.concurrent.locks.ReentrantLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock;
import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock;
import net.jodah.failsafe.RetryPolicy;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Time;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.timeline.TimelineDomain;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntityGroupId;
import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.util.MinimalPrettyPrinter;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.databind.type.TypeFactory;
import com.fasterxml.jackson.module.jaxb.JaxbAnnotationIntrospector;
import javax.ws.rs.client.Client;
/**
* A simple writer class for storing Timeline data in any storage that
* implements a basic FileSystem interface.
* This writer is used for ATSv1.5.
*/
@Private
@Unstable
public class FileSystemTimelineWriter extends TimelineWriter{
  private static final Logger LOG = LoggerFactory
      .getLogger(FileSystemTimelineWriter.class);

  // App log directory must be readable by group so server can access logs
  // and writable by group so it can be deleted by server
  private static final short APP_LOG_DIR_PERMISSIONS = 0770;
  // Logs must be readable by group so server can access them
  private static final short FILE_LOG_PERMISSIONS = 0640;
  // File-name prefixes distinguishing the three log kinds in an attempt dir.
  private static final String DOMAIN_LOG_PREFIX = "domainlog-";
  private static final String SUMMARY_LOG_PREFIX = "summarylog-";
  private static final String ENTITY_LOG_PREFIX = "entitylog-";

  // Root "active" directory under which per-app/per-attempt dirs are created.
  private Path activePath = null;
  // FileSystem instance owned by this writer; closed in close().
  private FileSystem fs = null;
  // Entity types routed to the summary log instead of the entity-group log.
  private Set<String> summaryEntityTypes;
  // Shared Jackson mapper configured in createObjectMapper().
  private ObjectMapper objMapper = null;
  // Flush/clean periods (seconds) and FD retention TTL read from conf.
  private long flushIntervalSecs;
  private long cleanIntervalSecs;
  private long ttl;
  private LogFDsCache logFDsCache = null;
  // Whether the target FileSystem supports append(); if not, each reopen
  // writes to a new timestamp-suffixed file (see LogFD.createLogFileStream).
  private boolean isAppendSupported;
  private final AttemptDirCache attemptDirCache;
  /**
   * Creates a writer that stores timeline data under the configured
   * entity-group-FS-store "active" directory.
   *
   * @param conf configuration supplying paths, intervals and feature flags
   * @param authUgi UGI whose short user name is used for per-user dirs
   * @param client REST client handed to the parent {@code TimelineWriter}
   * @param resURI timeline service resource URI for the parent writer
   * @param retryPolicy retry policy for the parent writer's REST calls
   * @throws IOException if the active directory does not exist or the
   *         FileSystem cannot be created
   */
  public FileSystemTimelineWriter(Configuration conf,
      UserGroupInformation authUgi, Client client, URI resURI,
      RetryPolicy<Object> retryPolicy)
      throws IOException {
    super(authUgi, client, resURI, retryPolicy);

    Configuration fsConf = new Configuration(conf);

    activePath = new Path(fsConf.get(
        YarnConfiguration
            .TIMELINE_SERVICE_ENTITYGROUP_FS_STORE_ACTIVE_DIR,
        YarnConfiguration
            .TIMELINE_SERVICE_ENTITYGROUP_FS_STORE_ACTIVE_DIR_DEFAULT));

    fs = FileSystem.newInstance(activePath.toUri(), fsConf);

    // raise FileNotFoundException if the path is not found
    fs.getFileStatus(activePath);

    summaryEntityTypes = new HashSet<String>(
        conf.getStringCollection(YarnConfiguration
            .TIMELINE_SERVICE_ENTITYGROUP_FS_STORE_SUMMARY_ENTITY_TYPES));

    flushIntervalSecs = conf.getLong(
        YarnConfiguration
            .TIMELINE_SERVICE_CLIENT_FD_FLUSH_INTERVAL_SECS,
        YarnConfiguration
            .TIMELINE_SERVICE_CLIENT_FD_FLUSH_INTERVAL_SECS_DEFAULT);

    cleanIntervalSecs = conf.getLong(
        YarnConfiguration
            .TIMELINE_SERVICE_CLIENT_FD_CLEAN_INTERVAL_SECS,
        YarnConfiguration
            .TIMELINE_SERVICE_CLIENT_FD_CLEAN_INTERVAL_SECS_DEFAULT);

    ttl = conf.getLong(
        YarnConfiguration.TIMELINE_SERVICE_CLIENT_FD_RETAIN_SECS,
        YarnConfiguration.TIMELINE_SERVICE_CLIENT_FD_RETAIN_SECS_DEFAULT);

    long timerTaskTTL = conf.getLong(
        YarnConfiguration.TIMELINE_SERVICE_CLIENT_INTERNAL_TIMERS_TTL_SECS,
        YarnConfiguration
            .TIMELINE_SERVICE_CLIENT_INTERNAL_TIMERS_TTL_SECS_DEFAULT);

    // The FD cache owns the flush/clean/monitor timers and all open FDs.
    logFDsCache =
        new LogFDsCache(flushIntervalSecs, cleanIntervalSecs, ttl,
            timerTaskTTL);

    this.isAppendSupported = conf.getBoolean(
        YarnConfiguration.TIMELINE_SERVICE_ENTITYFILE_FS_SUPPORT_APPEND, true);

    boolean storeInsideUserDir = conf.getBoolean(
        YarnConfiguration.TIMELINE_SERVICE_ENTITYGROUP_FS_STORE_WITH_USER_DIR,
        false);

    objMapper = createObjectMapper();

    int attemptDirCacheSize = conf.getInt(
        YarnConfiguration
            .TIMELINE_SERVICE_CLIENT_INTERNAL_ATTEMPT_DIR_CACHE_SIZE,
        YarnConfiguration
            .DEFAULT_TIMELINE_SERVICE_CLIENT_INTERNAL_ATTEMPT_DIR_CACHE_SIZE);

    attemptDirCache = new AttemptDirCache(attemptDirCacheSize, fs, activePath,
        authUgi, storeInsideUserDir);

    // Dump the effective configuration once at debug level.
    if (LOG.isDebugEnabled()) {
      StringBuilder debugMSG = new StringBuilder();
      debugMSG.append(
          YarnConfiguration.TIMELINE_SERVICE_CLIENT_FD_FLUSH_INTERVAL_SECS
              + "=" + flushIntervalSecs + ", " +
              YarnConfiguration.TIMELINE_SERVICE_CLIENT_FD_CLEAN_INTERVAL_SECS
              + "=" + cleanIntervalSecs + ", " +
              YarnConfiguration.TIMELINE_SERVICE_CLIENT_FD_RETAIN_SECS
              + "=" + ttl + ", " +
              YarnConfiguration.TIMELINE_SERVICE_ENTITYFILE_FS_SUPPORT_APPEND
              + "=" + isAppendSupported + ", " +
              YarnConfiguration.TIMELINE_SERVICE_ENTITYGROUP_FS_STORE_WITH_USER_DIR
              + "=" + storeInsideUserDir + ", " +
              YarnConfiguration.TIMELINE_SERVICE_ENTITYGROUP_FS_STORE_ACTIVE_DIR
              + "=" + activePath);
      if (summaryEntityTypes != null && !summaryEntityTypes.isEmpty()) {
        debugMSG.append(", " + YarnConfiguration
            .TIMELINE_SERVICE_ENTITYGROUP_FS_STORE_SUMMARY_ENTITY_TYPES
            + " = " + summaryEntityTypes);
      }
      LOG.debug(debugMSG.toString());
    }
  }
@Override
public String toString() {
return "FileSystemTimelineWriter writing to " + activePath;
}
@Override
public TimelinePutResponse putEntities(
ApplicationAttemptId appAttemptId, TimelineEntityGroupId groupId,
TimelineEntity... entities) throws IOException, YarnException {
if (appAttemptId == null) {
return putEntities(entities);
}
List<TimelineEntity> entitiesToDBStore = new ArrayList<TimelineEntity>();
List<TimelineEntity> entitiesToSummaryCache
= new ArrayList<TimelineEntity>();
List<TimelineEntity> entitiesToEntityCache
= new ArrayList<TimelineEntity>();
Path attemptDir = attemptDirCache.getAppAttemptDir(appAttemptId);
for (TimelineEntity entity : entities) {
if (summaryEntityTypes.contains(entity.getEntityType())) {
entitiesToSummaryCache.add(entity);
} else {
if (groupId != null) {
entitiesToEntityCache.add(entity);
} else {
entitiesToDBStore.add(entity);
}
}
}
if (!entitiesToSummaryCache.isEmpty()) {
Path summaryLogPath =
new Path(attemptDir, SUMMARY_LOG_PREFIX + appAttemptId.toString());
LOG.debug("Writing summary log for {} to {}", appAttemptId,
summaryLogPath);
this.logFDsCache.writeSummaryEntityLogs(fs, summaryLogPath, objMapper,
appAttemptId, entitiesToSummaryCache, isAppendSupported);
}
if (!entitiesToEntityCache.isEmpty()) {
Path entityLogPath =
new Path(attemptDir, ENTITY_LOG_PREFIX + groupId.toString());
LOG.debug("Writing entity log for {} to {}", groupId, entityLogPath);
this.logFDsCache.writeEntityLogs(fs, entityLogPath, objMapper,
appAttemptId, groupId, entitiesToEntityCache, isAppendSupported);
}
if (!entitiesToDBStore.isEmpty()) {
putEntities(entitiesToDBStore.toArray(
new TimelineEntity[entitiesToDBStore.size()]));
}
return new TimelinePutResponse();
}
@Override
public void putDomain(ApplicationAttemptId appAttemptId,
TimelineDomain domain) throws IOException, YarnException {
if (appAttemptId == null) {
putDomain(domain);
} else {
writeDomain(appAttemptId, domain);
}
}
@Override
public synchronized void close() throws Exception {
if (logFDsCache != null) {
LOG.debug("Closing cache");
logFDsCache.flush();
}
IOUtils.cleanupWithLogger(LOG, logFDsCache, fs);
}
@Override
public void flush() throws IOException {
if (logFDsCache != null) {
LOG.debug("Flushing cache");
logFDsCache.flush();
}
}
private ObjectMapper createObjectMapper() {
ObjectMapper mapper = new ObjectMapper();
mapper.setAnnotationIntrospector(
new JaxbAnnotationIntrospector(TypeFactory.defaultInstance()));
mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
mapper.configure(SerializationFeature.FLUSH_AFTER_WRITE_VALUE, false);
return mapper;
}
private void writeDomain(ApplicationAttemptId appAttemptId,
TimelineDomain domain) throws IOException {
Path domainLogPath =
new Path(attemptDirCache.getAppAttemptDir(appAttemptId),
DOMAIN_LOG_PREFIX + appAttemptId.toString());
LOG.debug("Writing domains for {} to {}", appAttemptId, domainLogPath);
this.logFDsCache.writeDomainLog(
fs, domainLogPath, objMapper, domain, isAppendSupported);
}
private static class DomainLogFD extends LogFD {
public DomainLogFD(FileSystem fs, Path logPath, ObjectMapper objMapper,
boolean isAppendSupported) throws IOException {
super(fs, logPath, objMapper, isAppendSupported);
}
public void writeDomain(TimelineDomain domain)
throws IOException {
getObjectMapper().writeValue(getJsonGenerator(), domain);
updateLastModifiedTime(Time.monotonicNow());
}
}
private static class EntityLogFD extends LogFD {
public EntityLogFD(FileSystem fs, Path logPath, ObjectMapper objMapper,
boolean isAppendSupported) throws IOException {
super(fs, logPath, objMapper, isAppendSupported);
}
public void writeEntities(List<TimelineEntity> entities)
throws IOException {
if (writerClosed()) {
prepareForWrite();
}
LOG.debug("Writing entity list of size {}", entities.size());
for (TimelineEntity entity : entities) {
getObjectMapper().writeValue(getJsonGenerator(), entity);
}
updateLastModifiedTime(Time.monotonicNow());
}
}
  /**
   * A single open log file: an FSDataOutputStream plus the JSON generator
   * writing to it. Access is serialized via an explicit lock()/unlock()
   * pair that callers (LogFDsCache) must use around every operation.
   * The FD remembers its last write time so the cleaner can close
   * descriptors that have been idle longer than the TTL; subclasses may
   * reopen a closed FD via prepareForWrite().
   */
  private static class LogFD {
    private FSDataOutputStream stream;
    private ObjectMapper objMapper;
    private JsonGenerator jsonGenerator;
    // Monotonic timestamp of the last write; used by the inactive-FD cleaner.
    private long lastModifiedTime;
    private final boolean isAppendSupported;
    private final ReentrantLock fdLock = new ReentrantLock();
    private final FileSystem fs;
    private final Path logPath;

    public LogFD(FileSystem fs, Path logPath, ObjectMapper objMapper,
        boolean isAppendSupported) throws IOException {
      this.fs = fs;
      this.logPath = logPath;
      this.isAppendSupported = isAppendSupported;
      this.objMapper = objMapper;
      // Open the stream immediately; writerClosed() is false after this.
      prepareForWrite();
    }

    /** Closes the generator and stream; safe to call more than once. */
    public void close() {
      if (stream != null) {
        IOUtils.cleanupWithLogger(LOG, jsonGenerator);
        IOUtils.cleanupWithLogger(LOG, stream);
        stream = null;
        jsonGenerator = null;
      }
    }

    /** Flushes buffered JSON and hflushes the HDFS stream, if open. */
    public void flush() throws IOException {
      if (stream != null) {
        jsonGenerator.flush();
        stream.hflush();
      }
    }

    public long getLastModifiedTime() {
      return this.lastModifiedTime;
    }

    /** (Re)opens the output stream and JSON generator for this log path. */
    protected void prepareForWrite() throws IOException{
      this.stream = createLogFileStream(fs, logPath);
      // Cast disambiguates the createGenerator(OutputStream) overload.
      this.jsonGenerator = new JsonFactory().createGenerator(
          (OutputStream)stream);
      // One JSON value per line.
      this.jsonGenerator.setPrettyPrinter(new MinimalPrettyPrinter("\n"));
      this.lastModifiedTime = Time.monotonicNow();
    }

    protected boolean writerClosed() {
      return stream == null;
    }

    /**
     * Opens the log file for writing. When append is unsupported, each
     * (re)open writes to a fresh file suffixed with the current monotonic
     * time instead of appending to the existing one.
     */
    private FSDataOutputStream createLogFileStream(FileSystem fileSystem,
        Path logPathToCreate)
        throws IOException {
      FSDataOutputStream streamToCreate;
      if (!isAppendSupported) {
        logPathToCreate =
            new Path(logPathToCreate.getParent(),
              (logPathToCreate.getName() + "_" + Time.monotonicNow()));
      }
      if (!fileSystem.exists(logPathToCreate)) {
        streamToCreate = fileSystem.create(logPathToCreate, false);
        fileSystem.setPermission(logPathToCreate,
            new FsPermission(FILE_LOG_PERMISSIONS));
      } else {
        streamToCreate = fileSystem.append(logPathToCreate);
      }
      return streamToCreate;
    }

    public void lock() {
      this.fdLock.lock();
    }

    public void unlock() {
      this.fdLock.unlock();
    }

    protected JsonGenerator getJsonGenerator() {
      return jsonGenerator;
    }

    protected ObjectMapper getObjectMapper() {
      return objMapper;
    }

    protected void updateLastModifiedTime(long updatedTime) {
      this.lastModifiedTime = updatedTime;
    }
  }
private static class LogFDsCache implements Closeable, Flushable{
private DomainLogFD domainLogFD;
private Map<ApplicationAttemptId, EntityLogFD> summanyLogFDs;
private Map<ApplicationAttemptId, HashMap<TimelineEntityGroupId,
EntityLogFD>> entityLogFDs;
private Timer flushTimer = null;
private Timer cleanInActiveFDsTimer = null;
private Timer monitorTaskTimer = null;
private final long ttl;
private final ReentrantLock domainFDLocker = new ReentrantLock();
private final ReentrantLock summaryTableLocker = new ReentrantLock();
private final ReentrantLock entityTableLocker = new ReentrantLock();
private final ReentrantLock summaryTableCopyLocker = new ReentrantLock();
private final ReentrantLock entityTableCopyLocker = new ReentrantLock();
private volatile boolean serviceStopped = false;
private volatile boolean timerTaskStarted = false;
private final ReentrantLock timerTaskLocker = new ReentrantLock();
private final long flushIntervalSecs;
private final long cleanIntervalSecs;
private final long timerTaskRetainTTL;
private volatile long timeStampOfLastWrite = Time.monotonicNow();
private final ReadLock timerTasksMonitorReadLock;
private final WriteLock timerTasksMonitorWriteLock;
public LogFDsCache(long flushIntervalSecs, long cleanIntervalSecs,
long ttl, long timerTaskRetainTTL) {
domainLogFD = null;
summanyLogFDs = new HashMap<ApplicationAttemptId, EntityLogFD>();
entityLogFDs = new HashMap<ApplicationAttemptId,
HashMap<TimelineEntityGroupId, EntityLogFD>>();
this.ttl = ttl * 1000;
this.flushIntervalSecs = flushIntervalSecs;
this.cleanIntervalSecs = cleanIntervalSecs;
long timerTaskRetainTTLVar = timerTaskRetainTTL * 1000;
if (timerTaskRetainTTLVar > this.ttl) {
this.timerTaskRetainTTL = timerTaskRetainTTLVar;
} else {
this.timerTaskRetainTTL = this.ttl + 2 * 60 * 1000;
LOG.warn("The specific " + YarnConfiguration
.TIMELINE_SERVICE_CLIENT_INTERNAL_TIMERS_TTL_SECS + " : "
+ timerTaskRetainTTL + " is invalid, because it is less than or "
+ "equal to " + YarnConfiguration
.TIMELINE_SERVICE_CLIENT_FD_RETAIN_SECS + " : " + ttl + ". Use "
+ YarnConfiguration.TIMELINE_SERVICE_CLIENT_FD_RETAIN_SECS + " : "
+ ttl + " + 120s instead.");
}
ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
this.timerTasksMonitorReadLock = lock.readLock();
this.timerTasksMonitorWriteLock = lock.writeLock();
}
@Override
public void flush() throws IOException {
this.domainFDLocker.lock();
try {
if (domainLogFD != null) {
domainLogFD.flush();
}
} finally {
this.domainFDLocker.unlock();
}
flushSummaryFDMap(copySummaryLogFDs(summanyLogFDs));
flushEntityFDMap(copyEntityLogFDs(entityLogFDs));
}
private Map<ApplicationAttemptId, EntityLogFD> copySummaryLogFDs(
Map<ApplicationAttemptId, EntityLogFD> summanyLogFDsToCopy) {
summaryTableCopyLocker.lock();
try {
return new HashMap<ApplicationAttemptId, EntityLogFD>(
summanyLogFDsToCopy);
} finally {
summaryTableCopyLocker.unlock();
}
}
private Map<ApplicationAttemptId, HashMap<TimelineEntityGroupId,
EntityLogFD>> copyEntityLogFDs(Map<ApplicationAttemptId,
HashMap<TimelineEntityGroupId, EntityLogFD>> entityLogFDsToCopy) {
entityTableCopyLocker.lock();
try {
return new HashMap<ApplicationAttemptId, HashMap<TimelineEntityGroupId,
EntityLogFD>>(entityLogFDsToCopy);
} finally {
entityTableCopyLocker.unlock();
}
}
private void flushSummaryFDMap(Map<ApplicationAttemptId,
EntityLogFD> logFDs) throws IOException {
if (!logFDs.isEmpty()) {
for (Entry<ApplicationAttemptId, EntityLogFD> logFDEntry : logFDs
.entrySet()) {
EntityLogFD logFD = logFDEntry.getValue();
logFD.lock();
try {
logFD.flush();
} finally {
logFD.unlock();
}
}
}
}
private void flushEntityFDMap(Map<ApplicationAttemptId, HashMap<
TimelineEntityGroupId, EntityLogFD>> logFDs) throws IOException {
if (!logFDs.isEmpty()) {
for (Entry<ApplicationAttemptId, HashMap<TimelineEntityGroupId,
EntityLogFD>> logFDMapEntry : logFDs.entrySet()) {
HashMap<TimelineEntityGroupId, EntityLogFD> logFDMap
= logFDMapEntry.getValue();
for (Entry<TimelineEntityGroupId, EntityLogFD> logFDEntry
: logFDMap.entrySet()) {
EntityLogFD logFD = logFDEntry.getValue();
logFD.lock();
try {
logFD.flush();
} finally {
logFD.unlock();
}
}
}
}
}
private class FlushTimerTask extends TimerTask {
@Override
public void run() {
try {
flush();
} catch (Exception e) {
LOG.debug("{}", e);
}
}
}
private void cleanInActiveFDs() {
long currentTimeStamp = Time.monotonicNow();
this.domainFDLocker.lock();
try {
if (domainLogFD != null) {
if (currentTimeStamp - domainLogFD.getLastModifiedTime() >= ttl) {
domainLogFD.close();
domainLogFD = null;
}
}
} finally {
this.domainFDLocker.unlock();
}
cleanInActiveSummaryFDsforMap(copySummaryLogFDs(summanyLogFDs),
currentTimeStamp);
cleanInActiveEntityFDsforMap(copyEntityLogFDs(entityLogFDs),
currentTimeStamp);
}
private void cleanInActiveSummaryFDsforMap(
Map<ApplicationAttemptId, EntityLogFD> logFDs,
long currentTimeStamp) {
if (!logFDs.isEmpty()) {
for (Entry<ApplicationAttemptId, EntityLogFD> logFDEntry : logFDs
.entrySet()) {
EntityLogFD logFD = logFDEntry.getValue();
logFD.lock();
try {
if (currentTimeStamp - logFD.getLastModifiedTime() >= ttl) {
logFD.close();
}
} finally {
logFD.unlock();
}
}
}
}
private void cleanInActiveEntityFDsforMap(Map<ApplicationAttemptId,
HashMap<TimelineEntityGroupId, EntityLogFD>> logFDs,
long currentTimeStamp) {
if (!logFDs.isEmpty()) {
for (Entry<ApplicationAttemptId, HashMap<
TimelineEntityGroupId, EntityLogFD>> logFDMapEntry
: logFDs.entrySet()) {
HashMap<TimelineEntityGroupId, EntityLogFD> logFDMap
= logFDMapEntry.getValue();
for (Entry<TimelineEntityGroupId, EntityLogFD> logFDEntry
: logFDMap.entrySet()) {
EntityLogFD logFD = logFDEntry.getValue();
logFD.lock();
try {
if (currentTimeStamp - logFD.getLastModifiedTime() >= ttl) {
logFD.close();
}
} finally {
logFD.unlock();
}
}
}
}
}
private class CleanInActiveFDsTask extends TimerTask {
@Override
public void run() {
try {
cleanInActiveFDs();
} catch (Exception e) {
LOG.warn(e.toString());
}
}
}
private class TimerMonitorTask extends TimerTask {
@Override
public void run() {
timerTasksMonitorWriteLock.lock();
try {
monitorTimerTasks();
} finally {
timerTasksMonitorWriteLock.unlock();
}
}
}
private void monitorTimerTasks() {
if (Time.monotonicNow() - this.timeStampOfLastWrite
>= this.timerTaskRetainTTL) {
cancelAndCloseTimerTasks();
timerTaskStarted = false;
} else {
if (this.monitorTaskTimer != null) {
this.monitorTaskTimer.schedule(new TimerMonitorTask(),
this.timerTaskRetainTTL);
}
}
}
@Override
public void close() throws IOException {
serviceStopped = true;
cancelAndCloseTimerTasks();
}
private void cancelAndCloseTimerTasks() {
if (flushTimer != null) {
flushTimer.cancel();
flushTimer = null;
}
if (cleanInActiveFDsTimer != null) {
cleanInActiveFDsTimer.cancel();
cleanInActiveFDsTimer = null;
}
if (monitorTaskTimer != null) {
monitorTaskTimer.cancel();
monitorTaskTimer = null;
}
this.domainFDLocker.lock();
try {
if (domainLogFD != null) {
domainLogFD.close();
domainLogFD = null;
}
} finally {
this.domainFDLocker.unlock();
}
closeSummaryFDs(summanyLogFDs);
closeEntityFDs(entityLogFDs);
}
private void closeEntityFDs(Map<ApplicationAttemptId,
HashMap<TimelineEntityGroupId, EntityLogFD>> logFDs) {
entityTableLocker.lock();
try {
if (!logFDs.isEmpty()) {
for (Entry<ApplicationAttemptId, HashMap<TimelineEntityGroupId,
EntityLogFD>> logFDMapEntry : logFDs.entrySet()) {
HashMap<TimelineEntityGroupId, EntityLogFD> logFDMap
= logFDMapEntry.getValue();
for (Entry<TimelineEntityGroupId, EntityLogFD> logFDEntry
: logFDMap.entrySet()) {
EntityLogFD logFD = logFDEntry.getValue();
try {
logFD.lock();
logFD.close();
} finally {
logFD.unlock();
}
}
}
}
} finally {
entityTableLocker.unlock();
}
}
private void closeSummaryFDs(
Map<ApplicationAttemptId, EntityLogFD> logFDs) {
summaryTableLocker.lock();
try {
if (!logFDs.isEmpty()) {
for (Entry<ApplicationAttemptId, EntityLogFD> logFDEntry
: logFDs.entrySet()) {
EntityLogFD logFD = logFDEntry.getValue();
try {
logFD.lock();
logFD.close();
} finally {
logFD.unlock();
}
}
}
} finally {
summaryTableLocker.unlock();
}
}
public void writeDomainLog(FileSystem fs, Path logPath,
ObjectMapper objMapper, TimelineDomain domain,
boolean isAppendSupported) throws IOException {
checkAndStartTimeTasks();
this.domainFDLocker.lock();
try {
if (this.domainLogFD != null) {
this.domainLogFD.writeDomain(domain);
} else {
this.domainLogFD =
new DomainLogFD(fs, logPath, objMapper, isAppendSupported);
this.domainLogFD.writeDomain(domain);
}
} finally {
this.domainFDLocker.unlock();
}
}
public void writeEntityLogs(FileSystem fs, Path entityLogPath,
ObjectMapper objMapper, ApplicationAttemptId appAttemptId,
TimelineEntityGroupId groupId, List<TimelineEntity> entitiesToEntity,
boolean isAppendSupported) throws IOException{
checkAndStartTimeTasks();
writeEntityLogs(fs, entityLogPath, objMapper, appAttemptId,
groupId, entitiesToEntity, isAppendSupported, this.entityLogFDs);
}
private void writeEntityLogs(FileSystem fs, Path logPath,
ObjectMapper objMapper, ApplicationAttemptId attemptId,
TimelineEntityGroupId groupId, List<TimelineEntity> entities,
boolean isAppendSupported, Map<ApplicationAttemptId, HashMap<
TimelineEntityGroupId, EntityLogFD>> logFDs) throws IOException {
HashMap<TimelineEntityGroupId, EntityLogFD>logMapFD
= logFDs.get(attemptId);
if (logMapFD != null) {
EntityLogFD logFD = logMapFD.get(groupId);
if (logFD != null) {
logFD.lock();
try {
if (serviceStopped) {
return;
}
logFD.writeEntities(entities);
} finally {
logFD.unlock();
}
} else {
createEntityFDandWrite(fs, logPath, objMapper, attemptId, groupId,
entities, isAppendSupported, logFDs);
}
} else {
createEntityFDandWrite(fs, logPath, objMapper, attemptId, groupId,
entities, isAppendSupported, logFDs);
}
}
private void createEntityFDandWrite(FileSystem fs, Path logPath,
ObjectMapper objMapper, ApplicationAttemptId attemptId,
TimelineEntityGroupId groupId, List<TimelineEntity> entities,
boolean isAppendSupported, Map<ApplicationAttemptId, HashMap<
TimelineEntityGroupId, EntityLogFD>> logFDs) throws IOException{
entityTableLocker.lock();
try {
if (serviceStopped) {
return;
}
HashMap<TimelineEntityGroupId, EntityLogFD> logFDMap =
logFDs.get(attemptId);
if (logFDMap == null) {
logFDMap = new HashMap<TimelineEntityGroupId, EntityLogFD>();
}
EntityLogFD logFD = logFDMap.get(groupId);
if (logFD == null) {
logFD = new EntityLogFD(fs, logPath, objMapper, isAppendSupported);
}
logFD.lock();
try {
logFD.writeEntities(entities);
entityTableCopyLocker.lock();
try {
logFDMap.put(groupId, logFD);
logFDs.put(attemptId, logFDMap);
} finally {
entityTableCopyLocker.unlock();
}
} finally {
logFD.unlock();
}
} finally {
entityTableLocker.unlock();
}
}
public void writeSummaryEntityLogs(FileSystem fs, Path logPath,
ObjectMapper objMapper, ApplicationAttemptId attemptId,
List<TimelineEntity> entities, boolean isAppendSupported)
throws IOException {
checkAndStartTimeTasks();
writeSummmaryEntityLogs(fs, logPath, objMapper, attemptId, entities,
isAppendSupported, this.summanyLogFDs);
}
private void writeSummmaryEntityLogs(FileSystem fs, Path logPath,
ObjectMapper objMapper, ApplicationAttemptId attemptId,
List<TimelineEntity> entities, boolean isAppendSupported,
Map<ApplicationAttemptId, EntityLogFD> logFDs) throws IOException {
EntityLogFD logFD = null;
logFD = logFDs.get(attemptId);
if (logFD != null) {
logFD.lock();
try {
if (serviceStopped) {
return;
}
logFD.writeEntities(entities);
} finally {
logFD.unlock();
}
} else {
createSummaryFDAndWrite(fs, logPath, objMapper, attemptId, entities,
isAppendSupported, logFDs);
}
}
private void createSummaryFDAndWrite(FileSystem fs, Path logPath,
ObjectMapper objMapper, ApplicationAttemptId attemptId,
List<TimelineEntity> entities, boolean isAppendSupported,
Map<ApplicationAttemptId, EntityLogFD> logFDs) throws IOException {
summaryTableLocker.lock();
try {
if (serviceStopped) {
return;
}
EntityLogFD logFD = logFDs.get(attemptId);
if (logFD == null) {
logFD = new EntityLogFD(fs, logPath, objMapper, isAppendSupported);
}
logFD.lock();
try {
logFD.writeEntities(entities);
summaryTableCopyLocker.lock();
try {
logFDs.put(attemptId, logFD);
} finally {
summaryTableCopyLocker.unlock();
}
} finally {
logFD.unlock();
}
} finally {
summaryTableLocker.unlock();
}
}
private void createAndStartTimerTasks() {
this.flushTimer =
new Timer(LogFDsCache.class.getSimpleName() + "FlushTimer",
true);
this.flushTimer.schedule(new FlushTimerTask(), flushIntervalSecs * 1000,
flushIntervalSecs * 1000);
this.cleanInActiveFDsTimer =
new Timer(LogFDsCache.class.getSimpleName()
+ "cleanInActiveFDsTimer", true);
this.cleanInActiveFDsTimer.schedule(new CleanInActiveFDsTask(),
cleanIntervalSecs * 1000, cleanIntervalSecs * 1000);
this.monitorTaskTimer =
new Timer(LogFDsCache.class.getSimpleName() + "MonitorTimer",
true);
this.monitorTaskTimer.schedule(new TimerMonitorTask(),
this.timerTaskRetainTTL);
}
private void checkAndStartTimeTasks() {
this.timerTasksMonitorReadLock.lock();
try {
this.timeStampOfLastWrite = Time.monotonicNow();
if(!timerTaskStarted) {
timerTaskLocker.lock();
try {
if (!timerTaskStarted) {
createAndStartTimerTasks();
timerTaskStarted = true;
}
} finally {
timerTaskLocker.unlock();
}
}
} finally {
this.timerTasksMonitorReadLock.unlock();
}
}
}
private static class AttemptDirCache {
private final int attemptDirCacheSize;
private final Map<ApplicationAttemptId, Path> attemptDirCache;
private final FileSystem fs;
private final Path activePath;
private final UserGroupInformation authUgi;
private final boolean storeInsideUserDir;
public AttemptDirCache(int cacheSize, FileSystem fs, Path activePath,
UserGroupInformation ugi, boolean storeInsideUserDir) {
this.attemptDirCacheSize = cacheSize;
this.attemptDirCache =
new LinkedHashMap<ApplicationAttemptId, Path>(
attemptDirCacheSize, 0.75f, true) {
private static final long serialVersionUID = 1L;
@Override
protected boolean removeEldestEntry(
Map.Entry<ApplicationAttemptId, Path> eldest) {
return size() > attemptDirCacheSize;
}
};
this.fs = fs;
this.activePath = activePath;
this.authUgi = ugi;
this.storeInsideUserDir = storeInsideUserDir;
}
public Path getAppAttemptDir(ApplicationAttemptId attemptId)
throws IOException {
Path attemptDir = this.attemptDirCache.get(attemptId);
if (attemptDir == null) {
synchronized(this) {
attemptDir = this.attemptDirCache.get(attemptId);
if (attemptDir == null) {
attemptDir = createAttemptDir(attemptId);
attemptDirCache.put(attemptId, attemptDir);
}
}
}
return attemptDir;
}
private Path createAttemptDir(ApplicationAttemptId appAttemptId)
throws IOException {
Path appDir = createApplicationDir(appAttemptId.getApplicationId());
Path attemptDir = new Path(appDir, appAttemptId.toString());
if (FileSystem.mkdirs(fs, attemptDir,
new FsPermission(APP_LOG_DIR_PERMISSIONS))) {
LOG.debug("New attempt directory created - {}", attemptDir);
}
return attemptDir;
}
private Path createApplicationDir(ApplicationId appId) throws IOException {
Path appRootDir = getAppRootDir(authUgi.getShortUserName());
Path appDir = new Path(appRootDir, appId.toString());
if (FileSystem.mkdirs(fs, appDir,
new FsPermission(APP_LOG_DIR_PERMISSIONS))) {
LOG.debug("New app directory created - {}", appDir);
}
return appDir;
}
private Path getAppRootDir(String user) throws IOException {
if (!storeInsideUserDir) {
return activePath;
}
Path userDir = new Path(activePath, user);
if (FileSystem.mkdirs(fs, userDir,
new FsPermission(APP_LOG_DIR_PERMISSIONS))) {
LOG.debug("New user directory created - {}", userDir);
}
return userDir;
}
}
}
|
apache/felix-dev | 34,645 | ipojo/runtime/core-it/ipojo-core-service-dependency-test/src/test/java/org/apache/felix/ipojo/runtime/core/test/dependencies/TestMethodMultipleDependencies.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.felix.ipojo.runtime.core.test.dependencies;
import org.apache.felix.ipojo.ComponentInstance;
import org.apache.felix.ipojo.architecture.Architecture;
import org.apache.felix.ipojo.architecture.InstanceDescription;
import org.apache.felix.ipojo.runtime.core.test.services.CheckService;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.osgi.framework.ServiceReference;
import java.util.Properties;
import static org.junit.Assert.*;
public class TestMethodMultipleDependencies extends Common {
    // Consumer instances under test, one per binding style
    // (Object, ServiceReference, both, Map, Dictionary).
    ComponentInstance instance3, instance4, instance5, instance6, instance7;
    // FooService providers that are started/stopped to drive (un)bind events.
    ComponentInstance fooProvider1, fooProvider2;
@Before
public void setUp() {
try {
Properties prov = new Properties();
prov.put("instance.name", "FooProvider1");
fooProvider1 = ipojoHelper.getFactory("FooProviderType-1").createComponentInstance(prov);
fooProvider1.stop();
Properties prov2 = new Properties();
prov2.put("instance.name", "FooProvider2");
fooProvider2 = ipojoHelper.getFactory("FooProviderType-1").createComponentInstance(prov2);
fooProvider2.stop();
Properties i3 = new Properties();
i3.put("instance.name", "Object");
instance3 = ipojoHelper.getFactory("MObjectMultipleCheckServiceProvider").createComponentInstance(i3);
Properties i4 = new Properties();
i4.put("instance.name", "Ref");
instance4 = ipojoHelper.getFactory("MRefMultipleCheckServiceProvider").createComponentInstance(i4);
Properties i5 = new Properties();
i5.put("instance.name", "Both");
instance5 = ipojoHelper.getFactory("MBothMultipleCheckServiceProvider").createComponentInstance(i5);
Properties i6 = new Properties();
i6.put("instance.name", "Map");
instance6 = ipojoHelper.getFactory("MMapMultipleCheckServiceProvider").createComponentInstance(i6);
Properties i7 = new Properties();
i7.put("instance.name", "Dictionary");
instance7 = ipojoHelper.getFactory("MDictMultipleCheckServiceProvider").createComponentInstance(i7);
} catch (Exception e) {
fail(e.getMessage());
}
}
@After
public void tearDown() {
instance3.dispose();
instance4.dispose();
instance5.dispose();
instance6.dispose();
instance7.dispose();
fooProvider1.dispose();
fooProvider2.dispose();
instance3 = null;
instance4 = null;
instance5 = null;
instance6 = null;
instance7 = null;
fooProvider1 = null;
fooProvider2 = null;
}
    /**
     * Checks the Object-callback flavor: the instance becomes valid once a
     * provider appears, bind/unbind callbacks fire with the expected counts
     * as providers start and stop, and it becomes invalid again when the
     * last provider goes away.
     */
    @Test public void testObject() {
        ServiceReference arch_ref = ipojoHelper.getServiceReferenceByName(Architecture.class.getName(), instance3.getInstanceName());
        assertNotNull("Check architecture availability", arch_ref);
        InstanceDescription id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription();
        // No provider started yet: the multiple dependency is unsatisfied.
        assertTrue("Check instance invalidity - 1", id.getState() == ComponentInstance.INVALID);

        fooProvider1.start();
        id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription();
        assertTrue("Check instance validity - 2", id.getState() == ComponentInstance.VALID);

        ServiceReference cs_ref = ipojoHelper.getServiceReferenceByName(CheckService.class.getName(), instance3.getInstanceName());
        assertNotNull("Check CheckService availability", cs_ref);
        CheckService cs = (CheckService) osgiHelper.getRawServiceObject(cs_ref);
        Properties props = cs.getProps();
        //Check properties
        // One provider bound: only the object callback fired, once.
        assertTrue("check CheckService invocation - 1", ((Boolean) props.get("result")).booleanValue()); // True, a provider is here
        assertEquals("check void bind invocation - 1", ((Integer) props.get("voidB")).intValue(), 0);
        assertEquals("check void unbind callback invocation - 1", ((Integer) props.get("voidU")).intValue(), 0);
        assertEquals("check object bind callback invocation - 1", ((Integer) props.get("objectB")).intValue(), 1);
        assertEquals("check object unbind callback invocation - 1", ((Integer) props.get("objectU")).intValue(), 0);
        assertEquals("check ref bind callback invocation - 1", ((Integer) props.get("refB")).intValue(), 0);
        assertEquals("check ref unbind callback invocation - 1", ((Integer) props.get("refU")).intValue(), 0);
        assertEquals("Check FS invocation (int) - 1", ((Integer) props.get("int")).intValue(), 1);
        assertEquals("Check FS invocation (long) - 1", ((Long) props.get("long")).longValue(), 1);
        assertEquals("Check FS invocation (double) - 1", ((Double) props.get("double")).doubleValue(), 1.0, 0);

        fooProvider2.start();
        id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription();
        assertTrue("Check instance validity - 3", id.getState() == ComponentInstance.VALID);

        cs = (CheckService) osgiHelper.getRawServiceObject(cs_ref);
        props = cs.getProps();
        //Check properties
        // Second provider bound: object bind count is now 2, FS invocations hit both.
        assertTrue("check CheckService invocation - 2", ((Boolean) props.get("result")).booleanValue()); // True, two providers are here
        assertEquals("check void bind invocation - 2", ((Integer) props.get("voidB")).intValue(), 0);
        assertEquals("check void unbind callback invocation - 2", ((Integer) props.get("voidU")).intValue(), 0);
        assertEquals("check object bind callback invocation - 2", ((Integer) props.get("objectB")).intValue(), 2);
        assertEquals("check object unbind callback invocation - 2", ((Integer) props.get("objectU")).intValue(), 0);
        assertEquals("check ref bind callback invocation - 2", ((Integer) props.get("refB")).intValue(), 0);
        assertEquals("check ref unbind callback invocation - 2", ((Integer) props.get("refU")).intValue(), 0);
        assertEquals("Check FS invocation (int) - 2", ((Integer) props.get("int")).intValue(), 2);
        assertEquals("Check FS invocation (long) - 2", ((Long) props.get("long")).longValue(), 2);
        assertEquals("Check FS invocation (double) - 2", ((Double) props.get("double")).doubleValue(), 2.0, 0);

        fooProvider1.stop();
        id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription();
        // Still valid: one provider remains for the multiple dependency.
        assertTrue("Check instance validity - 4", id.getState() == ComponentInstance.VALID);

        cs = (CheckService) osgiHelper.getRawServiceObject(cs_ref);
        props = cs.getProps();
        //Check properties
        // One provider unbound: object unbind count is 1, FS counts drop back.
        assertTrue("check CheckService invocation - 3", ((Boolean) props.get("result")).booleanValue()); // True, two providers are here
        assertEquals("check void bind invocation - 3", ((Integer) props.get("voidB")).intValue(), 0);
        assertEquals("check void unbind callback invocation - 3", ((Integer) props.get("voidU")).intValue(), 0);
        assertEquals("check object bind callback invocation - 3", ((Integer) props.get("objectB")).intValue(), 2);
        assertEquals("check object unbind callback invocation - 3", ((Integer) props.get("objectU")).intValue(), 1);
        assertEquals("check ref bind callback invocation - 3", ((Integer) props.get("refB")).intValue(), 0);
        assertEquals("check ref unbind callback invocation - 3", ((Integer) props.get("refU")).intValue(), 0);
        assertEquals("Check FS invocation (int) - 3", ((Integer) props.get("int")).intValue(), 1);
        assertEquals("Check FS invocation (long) - 3", ((Long) props.get("long")).longValue(), 1);
        assertEquals("Check FS invocation (double) - 3", ((Double) props.get("double")).doubleValue(), 1.0, 0);

        fooProvider2.stop();
        id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription();
        // Last provider gone: the instance is invalid again.
        assertTrue("Check instance validity - 5", id.getState() == ComponentInstance.INVALID);

        id = null;
        cs = null;
        getContext().ungetService(arch_ref);
        getContext().ungetService(cs_ref);
    }
/**
 * Checks a dependency whose bind/unbind callbacks take a {@code ServiceReference}
 * argument (instance4): as providers come and go, only the refB/refU counters
 * should move while the void/object callback counters stay at zero.
 *
 * <p>Fix: assertEquals arguments reordered to JUnit's (message, expected, actual)
 * convention so failure messages report expected/actual values correctly.</p>
 */
@Test
public void testRef() {
    ServiceReference arch_ref = ipojoHelper.getServiceReferenceByName(Architecture.class.getName(), instance4.getInstanceName());
    assertNotNull("Check architecture availability", arch_ref);
    InstanceDescription id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription();
    assertTrue("Check instance invalidity - 1", id.getState() == ComponentInstance.INVALID);

    // First provider arrives: instance becomes valid, ref bind callback fires once.
    fooProvider1.start();
    id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 2", id.getState() == ComponentInstance.VALID);
    ServiceReference cs_ref = ipojoHelper.getServiceReferenceByName(CheckService.class.getName(), instance4.getInstanceName());
    assertNotNull("Check CheckService availability", cs_ref);
    CheckService cs = (CheckService) osgiHelper.getRawServiceObject(cs_ref);
    Properties props = cs.getProps();
    // Check properties
    assertTrue("check CheckService invocation - 1", ((Boolean) props.get("result")).booleanValue()); // True, a provider is here
    assertEquals("check void bind invocation - 1", 0, ((Integer) props.get("voidB")).intValue());
    assertEquals("check void unbind callback invocation - 1", 0, ((Integer) props.get("voidU")).intValue());
    assertEquals("check object bind callback invocation - 1", 0, ((Integer) props.get("objectB")).intValue());
    assertEquals("check object unbind callback invocation - 1", 0, ((Integer) props.get("objectU")).intValue());
    assertEquals("check ref bind callback invocation - 1", 1, ((Integer) props.get("refB")).intValue());
    assertEquals("check ref unbind callback invocation - 1", 0, ((Integer) props.get("refU")).intValue());
    assertEquals("Check FS invocation (int) - 1", 1, ((Integer) props.get("int")).intValue());
    assertEquals("Check FS invocation (long) - 1", 1, ((Long) props.get("long")).longValue());
    assertEquals("Check FS invocation (double) - 1", 1.0, ((Double) props.get("double")).doubleValue(), 0);

    // Second provider arrives: ref bind callback fires a second time.
    fooProvider2.start();
    id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 3", id.getState() == ComponentInstance.VALID);
    cs = (CheckService) osgiHelper.getRawServiceObject(cs_ref);
    props = cs.getProps();
    // Check properties
    assertTrue("check CheckService invocation - 2", ((Boolean) props.get("result")).booleanValue()); // True, two providers are here
    assertEquals("check void bind invocation - 2", 0, ((Integer) props.get("voidB")).intValue());
    assertEquals("check void unbind callback invocation - 2", 0, ((Integer) props.get("voidU")).intValue());
    assertEquals("check object bind callback invocation - 2", 0, ((Integer) props.get("objectB")).intValue());
    assertEquals("check object unbind callback invocation - 2", 0, ((Integer) props.get("objectU")).intValue());
    assertEquals("check ref bind callback invocation - 2", 2, ((Integer) props.get("refB")).intValue());
    assertEquals("check ref unbind callback invocation - 2", 0, ((Integer) props.get("refU")).intValue());
    assertEquals("Check FS invocation (int) - 2", 2, ((Integer) props.get("int")).intValue());
    assertEquals("Check FS invocation (long) - 2", 2, ((Long) props.get("long")).longValue());
    assertEquals("Check FS invocation (double) - 2", 2.0, ((Double) props.get("double")).doubleValue(), 0);

    // First provider leaves: one ref unbind; instance stays valid on provider 2.
    fooProvider1.stop();
    id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 4", id.getState() == ComponentInstance.VALID);
    cs = (CheckService) osgiHelper.getRawServiceObject(cs_ref);
    props = cs.getProps();
    // Check properties
    assertTrue("check CheckService invocation - 3", ((Boolean) props.get("result")).booleanValue()); // True, two providers are here
    assertEquals("check void bind invocation - 3", 0, ((Integer) props.get("voidB")).intValue());
    assertEquals("check void unbind callback invocation - 3", 0, ((Integer) props.get("voidU")).intValue());
    assertEquals("check object bind callback invocation - 3", 0, ((Integer) props.get("objectB")).intValue());
    assertEquals("check object unbind callback invocation - 3", 0, ((Integer) props.get("objectU")).intValue());
    assertEquals("check ref bind callback invocation - 3", 2, ((Integer) props.get("refB")).intValue());
    assertEquals("check ref unbind callback invocation - 3", 1, ((Integer) props.get("refU")).intValue());
    assertEquals("Check FS invocation (int) - 3", 1, ((Integer) props.get("int")).intValue());
    assertEquals("Check FS invocation (long) - 3", 1, ((Long) props.get("long")).longValue());
    assertEquals("Check FS invocation (double) - 3", 1.0, ((Double) props.get("double")).doubleValue(), 0);

    // Last provider leaves: the dependency is broken and the instance invalidates.
    fooProvider2.stop();
    id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 5", id.getState() == ComponentInstance.INVALID);
    id = null;
    cs = null;
    getContext().ungetService(arch_ref);
    getContext().ungetService(cs_ref);
}
/**
 * Checks a dependency whose bind/unbind callbacks take both the service object
 * and its {@code ServiceReference} (instance5): only the bothB/bothU counters
 * should move; void/object/map/dict callback counters stay at zero.
 *
 * <p>Fix: assertEquals arguments reordered to JUnit's (message, expected, actual)
 * convention so failure messages report expected/actual values correctly.</p>
 */
@Test
public void testBoth() {
    ServiceReference arch_ref = ipojoHelper.getServiceReferenceByName(Architecture.class.getName(), instance5.getInstanceName());
    assertNotNull("Check architecture availability", arch_ref);
    InstanceDescription id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription();
    assertTrue("Check instance invalidity - 1", id.getState() == ComponentInstance.INVALID);

    // First provider arrives: "both" bind callback fires once.
    fooProvider1.start();
    id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 2", id.getState() == ComponentInstance.VALID);
    ServiceReference cs_ref = ipojoHelper.getServiceReferenceByName(CheckService.class.getName(), instance5.getInstanceName());
    assertNotNull("Check CheckService availability", cs_ref);
    CheckService cs = (CheckService) osgiHelper.getRawServiceObject(cs_ref);
    Properties props = cs.getProps();
    // Check properties
    assertTrue("check CheckService invocation - 1", ((Boolean) props.get("result")).booleanValue()); // True, a provider is here
    assertEquals("check void bind invocation - 1", 0, ((Integer) props.get("voidB")).intValue());
    assertEquals("check void unbind callback invocation - 1", 0, ((Integer) props.get("voidU")).intValue());
    assertEquals("check object bind callback invocation - 1", 0, ((Integer) props.get("objectB")).intValue());
    assertEquals("check object unbind callback invocation - 1", 0, ((Integer) props.get("objectU")).intValue());
    assertEquals("check both bind callback invocation - 1", 1, ((Integer) props.get("bothB")).intValue());
    assertEquals("check both unbind callback invocation - 1", 0, ((Integer) props.get("bothU")).intValue());
    assertEquals("check map bind callback invocation -1", 0, ((Integer) props.get("mapB")).intValue());
    assertEquals("check map unbind callback invocation -1", 0, ((Integer) props.get("mapU")).intValue());
    assertEquals("check dict bind callback invocation -1", 0, ((Integer) props.get("dictB")).intValue());
    assertEquals("check dict unbind callback invocation -1", 0, ((Integer) props.get("dictU")).intValue());
    assertEquals("Check FS invocation (int) - 1", 1, ((Integer) props.get("int")).intValue());
    assertEquals("Check FS invocation (long) - 1", 1, ((Long) props.get("long")).longValue());
    assertEquals("Check FS invocation (double) - 1", 1.0, ((Double) props.get("double")).doubleValue(), 0);

    // Second provider arrives: "both" bind callback fires a second time.
    fooProvider2.start();
    id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 3", id.getState() == ComponentInstance.VALID);
    cs = (CheckService) osgiHelper.getRawServiceObject(cs_ref);
    props = cs.getProps();
    // Check properties
    assertTrue("check CheckService invocation - 2", ((Boolean) props.get("result")).booleanValue()); // True, two providers are here
    assertEquals("check void bind invocation - 2", 0, ((Integer) props.get("voidB")).intValue());
    assertEquals("check void unbind callback invocation - 2", 0, ((Integer) props.get("voidU")).intValue());
    assertEquals("check object bind callback invocation - 2", 0, ((Integer) props.get("objectB")).intValue());
    assertEquals("check object unbind callback invocation - 2", 0, ((Integer) props.get("objectU")).intValue());
    assertEquals("check both bind callback invocation - 2", 2, ((Integer) props.get("bothB")).intValue());
    assertEquals("check both unbind callback invocation - 2", 0, ((Integer) props.get("bothU")).intValue());
    assertEquals("check map bind callback invocation -2", 0, ((Integer) props.get("mapB")).intValue());
    assertEquals("check map unbind callback invocation -2", 0, ((Integer) props.get("mapU")).intValue());
    assertEquals("check dict bind callback invocation -2", 0, ((Integer) props.get("dictB")).intValue());
    assertEquals("check dict unbind callback invocation -2", 0, ((Integer) props.get("dictU")).intValue());
    assertEquals("Check FS invocation (int) - 2", 2, ((Integer) props.get("int")).intValue());
    assertEquals("Check FS invocation (long) - 2", 2, ((Long) props.get("long")).longValue());
    assertEquals("Check FS invocation (double) - 2", 2.0, ((Double) props.get("double")).doubleValue(), 0);

    // First provider leaves: one "both" unbind; instance remains valid.
    fooProvider1.stop();
    id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 4", id.getState() == ComponentInstance.VALID);
    cs = (CheckService) osgiHelper.getRawServiceObject(cs_ref);
    props = cs.getProps();
    // Check properties
    assertTrue("check CheckService invocation - 3", ((Boolean) props.get("result")).booleanValue()); // True, two providers are here
    assertEquals("check void bind invocation - 3", 0, ((Integer) props.get("voidB")).intValue());
    assertEquals("check void unbind callback invocation - 3", 0, ((Integer) props.get("voidU")).intValue());
    assertEquals("check object bind callback invocation - 3", 0, ((Integer) props.get("objectB")).intValue());
    assertEquals("check object unbind callback invocation - 3", 0, ((Integer) props.get("objectU")).intValue());
    assertEquals("check both bind callback invocation - 3", 2, ((Integer) props.get("bothB")).intValue());
    assertEquals("check both unbind callback invocation - 3", 1, ((Integer) props.get("bothU")).intValue());
    assertEquals("check map bind callback invocation -3", 0, ((Integer) props.get("mapB")).intValue());
    assertEquals("check map unbind callback invocation -3", 0, ((Integer) props.get("mapU")).intValue());
    assertEquals("check dict bind callback invocation -3", 0, ((Integer) props.get("dictB")).intValue());
    assertEquals("check dict unbind callback invocation -3", 0, ((Integer) props.get("dictU")).intValue());
    assertEquals("Check FS invocation (int) - 3", 1, ((Integer) props.get("int")).intValue());
    assertEquals("Check FS invocation (long) - 3", 1, ((Long) props.get("long")).longValue());
    assertEquals("Check FS invocation (double) - 3", 1.0, ((Double) props.get("double")).doubleValue(), 0);

    // Last provider leaves: the instance invalidates.
    fooProvider2.stop();
    id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 5", id.getState() == ComponentInstance.INVALID);
    id = null;
    cs = null;
    getContext().ungetService(arch_ref);
    getContext().ungetService(cs_ref);
}
/**
 * Checks a dependency whose bind/unbind callbacks take the service properties
 * as a {@code Map} (instance6): only the mapB/mapU counters should move;
 * void/object/both/dict callback counters stay at zero.
 *
 * <p>Fix: assertEquals arguments reordered to JUnit's (message, expected, actual)
 * convention so failure messages report expected/actual values correctly.</p>
 */
@Test
public void testMap() {
    ServiceReference arch_ref = ipojoHelper.getServiceReferenceByName(Architecture.class.getName(), instance6.getInstanceName());
    assertNotNull("Check architecture availability", arch_ref);
    InstanceDescription id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription();
    assertTrue("Check instance invalidity - 1", id.getState() == ComponentInstance.INVALID);

    // First provider arrives: map bind callback fires once.
    fooProvider1.start();
    id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 2", id.getState() == ComponentInstance.VALID);
    ServiceReference cs_ref = ipojoHelper.getServiceReferenceByName(CheckService.class.getName(), instance6.getInstanceName());
    assertNotNull("Check CheckService availability", cs_ref);
    CheckService cs = (CheckService) osgiHelper.getRawServiceObject(cs_ref);
    Properties props = cs.getProps();
    // Check properties
    assertTrue("check CheckService invocation - 1", ((Boolean) props.get("result")).booleanValue()); // True, a provider is here
    assertEquals("check void bind invocation - 1", 0, ((Integer) props.get("voidB")).intValue());
    assertEquals("check void unbind callback invocation - 1", 0, ((Integer) props.get("voidU")).intValue());
    assertEquals("check object bind callback invocation - 1", 0, ((Integer) props.get("objectB")).intValue());
    assertEquals("check object unbind callback invocation - 1", 0, ((Integer) props.get("objectU")).intValue());
    assertEquals("check both bind callback invocation - 1", 0, ((Integer) props.get("bothB")).intValue());
    assertEquals("check both unbind callback invocation - 1", 0, ((Integer) props.get("bothU")).intValue());
    assertEquals("check map bind callback invocation -1", 1, ((Integer) props.get("mapB")).intValue());
    assertEquals("check map unbind callback invocation -1", 0, ((Integer) props.get("mapU")).intValue());
    assertEquals("check dict bind callback invocation -1", 0, ((Integer) props.get("dictB")).intValue());
    assertEquals("check dict unbind callback invocation -1", 0, ((Integer) props.get("dictU")).intValue());
    assertEquals("Check FS invocation (int) - 1", 1, ((Integer) props.get("int")).intValue());
    assertEquals("Check FS invocation (long) - 1", 1, ((Long) props.get("long")).longValue());
    assertEquals("Check FS invocation (double) - 1", 1.0, ((Double) props.get("double")).doubleValue(), 0);

    // Second provider arrives: map bind callback fires a second time.
    fooProvider2.start();
    id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 3", id.getState() == ComponentInstance.VALID);
    cs = (CheckService) osgiHelper.getRawServiceObject(cs_ref);
    props = cs.getProps();
    // Check properties
    assertTrue("check CheckService invocation - 2", ((Boolean) props.get("result")).booleanValue()); // True, two providers are here
    assertEquals("check void bind invocation - 2", 0, ((Integer) props.get("voidB")).intValue());
    assertEquals("check void unbind callback invocation - 2", 0, ((Integer) props.get("voidU")).intValue());
    assertEquals("check object bind callback invocation - 2", 0, ((Integer) props.get("objectB")).intValue());
    assertEquals("check object unbind callback invocation - 2", 0, ((Integer) props.get("objectU")).intValue());
    assertEquals("check both bind callback invocation - 2", 0, ((Integer) props.get("bothB")).intValue());
    assertEquals("check both unbind callback invocation - 2", 0, ((Integer) props.get("bothU")).intValue());
    assertEquals("check map bind callback invocation -2", 2, ((Integer) props.get("mapB")).intValue());
    assertEquals("check map unbind callback invocation -2", 0, ((Integer) props.get("mapU")).intValue());
    assertEquals("check dict bind callback invocation -2", 0, ((Integer) props.get("dictB")).intValue());
    assertEquals("check dict unbind callback invocation -2", 0, ((Integer) props.get("dictU")).intValue());
    assertEquals("Check FS invocation (int) - 2", 2, ((Integer) props.get("int")).intValue());
    assertEquals("Check FS invocation (long) - 2", 2, ((Long) props.get("long")).longValue());
    assertEquals("Check FS invocation (double) - 2", 2.0, ((Double) props.get("double")).doubleValue(), 0);

    // First provider leaves: one map unbind; instance remains valid.
    fooProvider1.stop();
    id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 4", id.getState() == ComponentInstance.VALID);
    cs = (CheckService) osgiHelper.getRawServiceObject(cs_ref);
    props = cs.getProps();
    // Check properties
    assertTrue("check CheckService invocation - 3", ((Boolean) props.get("result")).booleanValue()); // True, two providers are here
    assertEquals("check void bind invocation - 3", 0, ((Integer) props.get("voidB")).intValue());
    assertEquals("check void unbind callback invocation - 3", 0, ((Integer) props.get("voidU")).intValue());
    assertEquals("check object bind callback invocation - 3", 0, ((Integer) props.get("objectB")).intValue());
    assertEquals("check object unbind callback invocation - 3", 0, ((Integer) props.get("objectU")).intValue());
    assertEquals("check both bind callback invocation - 3", 0, ((Integer) props.get("bothB")).intValue());
    assertEquals("check both unbind callback invocation - 3", 0, ((Integer) props.get("bothU")).intValue());
    assertEquals("check map bind callback invocation -3", 2, ((Integer) props.get("mapB")).intValue());
    assertEquals("check map unbind callback invocation -3", 1, ((Integer) props.get("mapU")).intValue());
    assertEquals("check dict bind callback invocation -3", 0, ((Integer) props.get("dictB")).intValue());
    assertEquals("check dict unbind callback invocation -3", 0, ((Integer) props.get("dictU")).intValue());
    assertEquals("Check FS invocation (int) - 3", 1, ((Integer) props.get("int")).intValue());
    assertEquals("Check FS invocation (long) - 3", 1, ((Long) props.get("long")).longValue());
    assertEquals("Check FS invocation (double) - 3", 1.0, ((Double) props.get("double")).doubleValue(), 0);

    // Last provider leaves: the instance invalidates.
    fooProvider2.stop();
    id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 5", id.getState() == ComponentInstance.INVALID);
    id = null;
    cs = null;
    getContext().ungetService(arch_ref);
    getContext().ungetService(cs_ref);
}
/**
 * Checks a dependency whose bind/unbind callbacks take the service properties
 * as a {@code Dictionary} (instance7): only the dictB/dictU counters should
 * move; void/object/both/map callback counters stay at zero.
 *
 * <p>Fix: assertEquals arguments reordered to JUnit's (message, expected, actual)
 * convention so failure messages report expected/actual values correctly.</p>
 */
@Test
public void testDict() {
    ServiceReference arch_ref = ipojoHelper.getServiceReferenceByName(Architecture.class.getName(), instance7.getInstanceName());
    assertNotNull("Check architecture availability", arch_ref);
    InstanceDescription id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription();
    assertTrue("Check instance invalidity - 1", id.getState() == ComponentInstance.INVALID);

    // First provider arrives: dict bind callback fires once.
    fooProvider1.start();
    id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 2", id.getState() == ComponentInstance.VALID);
    ServiceReference cs_ref = ipojoHelper.getServiceReferenceByName(CheckService.class.getName(), instance7.getInstanceName());
    assertNotNull("Check CheckService availability", cs_ref);
    CheckService cs = (CheckService) osgiHelper.getRawServiceObject(cs_ref);
    Properties props = cs.getProps();
    // Check properties
    assertTrue("check CheckService invocation - 1", ((Boolean) props.get("result")).booleanValue()); // True, a provider is here
    assertEquals("check void bind invocation - 1", 0, ((Integer) props.get("voidB")).intValue());
    assertEquals("check void unbind callback invocation - 1", 0, ((Integer) props.get("voidU")).intValue());
    assertEquals("check object bind callback invocation - 1", 0, ((Integer) props.get("objectB")).intValue());
    assertEquals("check object unbind callback invocation - 1", 0, ((Integer) props.get("objectU")).intValue());
    assertEquals("check both bind callback invocation - 1", 0, ((Integer) props.get("bothB")).intValue());
    assertEquals("check both unbind callback invocation - 1", 0, ((Integer) props.get("bothU")).intValue());
    assertEquals("check map bind callback invocation -1", 0, ((Integer) props.get("mapB")).intValue());
    assertEquals("check map unbind callback invocation -1", 0, ((Integer) props.get("mapU")).intValue());
    assertEquals("check dict bind callback invocation -1", 1, ((Integer) props.get("dictB")).intValue());
    assertEquals("check dict unbind callback invocation -1", 0, ((Integer) props.get("dictU")).intValue());
    assertEquals("Check FS invocation (int) - 1", 1, ((Integer) props.get("int")).intValue());
    assertEquals("Check FS invocation (long) - 1", 1, ((Long) props.get("long")).longValue());
    assertEquals("Check FS invocation (double) - 1", 1.0, ((Double) props.get("double")).doubleValue(), 0);

    // Second provider arrives: dict bind callback fires a second time.
    fooProvider2.start();
    id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 3", id.getState() == ComponentInstance.VALID);
    cs = (CheckService) osgiHelper.getRawServiceObject(cs_ref);
    props = cs.getProps();
    // Check properties
    assertTrue("check CheckService invocation - 2", ((Boolean) props.get("result")).booleanValue()); // True, two providers are here
    assertEquals("check void bind invocation - 2", 0, ((Integer) props.get("voidB")).intValue());
    assertEquals("check void unbind callback invocation - 2", 0, ((Integer) props.get("voidU")).intValue());
    assertEquals("check object bind callback invocation - 2", 0, ((Integer) props.get("objectB")).intValue());
    assertEquals("check object unbind callback invocation - 2", 0, ((Integer) props.get("objectU")).intValue());
    assertEquals("check both bind callback invocation - 2", 0, ((Integer) props.get("bothB")).intValue());
    assertEquals("check both unbind callback invocation - 2", 0, ((Integer) props.get("bothU")).intValue());
    assertEquals("check map bind callback invocation -2", 0, ((Integer) props.get("mapB")).intValue());
    assertEquals("check map unbind callback invocation -2", 0, ((Integer) props.get("mapU")).intValue());
    assertEquals("check dict bind callback invocation -2", 2, ((Integer) props.get("dictB")).intValue());
    assertEquals("check dict unbind callback invocation -2", 0, ((Integer) props.get("dictU")).intValue());
    assertEquals("Check FS invocation (int) - 2", 2, ((Integer) props.get("int")).intValue());
    assertEquals("Check FS invocation (long) - 2", 2, ((Long) props.get("long")).longValue());
    assertEquals("Check FS invocation (double) - 2", 2.0, ((Double) props.get("double")).doubleValue(), 0);

    // First provider leaves: one dict unbind; instance remains valid.
    fooProvider1.stop();
    id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 4", id.getState() == ComponentInstance.VALID);
    cs = (CheckService) osgiHelper.getRawServiceObject(cs_ref);
    props = cs.getProps();
    // Check properties
    assertTrue("check CheckService invocation - 3", ((Boolean) props.get("result")).booleanValue()); // True, two providers are here
    assertEquals("check void bind invocation - 3", 0, ((Integer) props.get("voidB")).intValue());
    assertEquals("check void unbind callback invocation - 3", 0, ((Integer) props.get("voidU")).intValue());
    assertEquals("check object bind callback invocation - 3", 0, ((Integer) props.get("objectB")).intValue());
    assertEquals("check object unbind callback invocation - 3", 0, ((Integer) props.get("objectU")).intValue());
    assertEquals("check both bind callback invocation - 3", 0, ((Integer) props.get("bothB")).intValue());
    assertEquals("check both unbind callback invocation - 3", 0, ((Integer) props.get("bothU")).intValue());
    assertEquals("check map bind callback invocation -3", 0, ((Integer) props.get("mapB")).intValue());
    assertEquals("check map unbind callback invocation -3", 0, ((Integer) props.get("mapU")).intValue());
    assertEquals("check dict bind callback invocation -3", 2, ((Integer) props.get("dictB")).intValue());
    assertEquals("check dict unbind callback invocation -3", 1, ((Integer) props.get("dictU")).intValue());
    assertEquals("Check FS invocation (int) - 3", 1, ((Integer) props.get("int")).intValue());
    assertEquals("Check FS invocation (long) - 3", 1, ((Long) props.get("long")).longValue());
    assertEquals("Check FS invocation (double) - 3", 1.0, ((Double) props.get("double")).doubleValue(), 0);

    // Last provider leaves: the instance invalidates.
    fooProvider2.stop();
    id = ((Architecture) osgiHelper.getRawServiceObject(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 5", id.getState() == ComponentInstance.INVALID);
    id = null;
    cs = null;
    getContext().ungetService(arch_ref);
    getContext().ungetService(cs_ref);
}
}
|
google/sagetv | 35,001 | java/sage/NetworkClient.java | /*
* Copyright 2015 The SageTV Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package sage;
import java.util.HashMap;
import java.util.Map;
/*
* 10/20/05 - We're changing the network system to only use 2 channels for communication instead of 4. We'll
* just keep the serverNotifier and clientListener. The serverNotifier will become a message based protocol which
* handles most client/server communcation. The clientListener will be used to receive larger data updates such as
* DB, properties and profiler information.
*/
public class NetworkClient
{
public static final int CS_STATE_UNAUTHORIZED_CLIENT = -2;
public static final int CS_STATE_VERSION_MISMATCH = -1;
public static final int CS_STATE_NOT_INITIALIZED = 0;
public static final int CS_STATE_SERVER_NOT_FOUND = 1;
public static final int CS_STATE_SERVER_LOADING = 2;
public static final int CS_STATE_CONNECTION_ESTABLISHING = 3;
public static final int CS_STATE_FULLY_CONNECTED = 4;
// Private: instances are obtained through connectToServer() (client side)
// or acceptFromClient() (server side), never constructed directly.
private NetworkClient()
{
// Timestamp of when this connection object was created.
createTime = Sage.time();
}
/**
 * Attempts to open a client connection to the named server.
 *
 * @param serverName   host name or address of the SageTV server
 * @param popupError   when true, connection failures show an error dialog
 * @param returnAlways when true, the (possibly unconnected) NetworkClient is
 *                     returned even on failure instead of being cleaned up
 * @return the NetworkClient, or null when the connection failed and
 *         returnAlways is false
 */
public static NetworkClient connectToServer(String serverName, boolean popupError, boolean returnAlways)
{
  NetworkClient client = new NetworkClient();
  boolean connected = client.connect(serverName, popupError);
  if (connected || returnAlways)
  {
    // Either the link is up, or the caller wants the object back regardless.
    return client;
  }
  // Failed and the caller doesn't want a dead object; tear it down.
  client.fullCleanup();
  return null;
}
/**
 * Reports whether both communication channels (the listener for bulk data and
 * the notifier for messages) have been established.
 */
public boolean isClientConnected()
{
  return !(clientListener == null || serverNotifier == null);
}
// Opens the client side of a SageTV client/server session. Two sockets are
// connected to the same server port: the first becomes the clientListener
// (bulk-data channel: DB/properties/profiler sync) and the second becomes the
// serverNotifier (message-based control channel) — see the class comment.
// Returns true only when both links are fully established; on any failure the
// connection state is updated and, if popupError is set, a localized dialog
// describes the failure category.
private boolean connect(String serverName, boolean popupError)
{
java.net.Socket sake = null;
try
{
// "unknown" is a placeholder used before the local server is discovered;
// route it through the UnknownHostException handling without touching the net.
if ("unknown".equals(serverName))
throw new java.net.UnknownHostException("Local server not known yet...don't try to connect");
sake = new java.net.Socket();
sake.connect(new java.net.InetSocketAddress(serverName, Sage.getInt(SageTV.SAGETV_PORT, SageTV.DEFAULT_SAGETV_PORT)));
clientListener = new SageTVConnection(sake);
// If we got here then we are OK to connect to this server, set our connection state
connectionState = CS_STATE_CONNECTION_ESTABLISHING;
if (Sage.DBG) System.out.println("Setting c/s connection state to: CONNECTION_ESTABLISHING");
SageTV.writeOutStateInfo(SageTV.SAGETV_CLIENT_CONNECTING, serverName, null);
clientListener.takeFormOfListener();
// Second socket for the notifier channel, paired with the listener above.
sake = new java.net.Socket();
sake.connect(new java.net.InetSocketAddress(serverName, Sage.getInt(SageTV.SAGETV_PORT, SageTV.DEFAULT_SAGETV_PORT)));
serverNotifier = new SageTVConnection(sake);
serverNotifier.takeFormOfNotifier(clientListener);
clientName = clientListener.getClientName();
// Both sockets are now owned by SageTVConnection objects; clear the local
// reference so the finally block doesn't close a live connection.
sake = null;
}
catch (java.net.UnknownHostException e)
{
// DNS/lookup failure (error code 0 in the dialog text).
cleanupFailedClientConnection();
if (popupError)
MySwingUtils.showWrappedMessageDialog(Sage.rez("Network_Cannot_Connect_Error") + " (0)",
Sage.rez("Network_Error"), javax.swing.JOptionPane.ERROR_MESSAGE);
System.out.println("Error establishing server connection to " + serverName + " of:" + e);
e.printStackTrace();
return false;
}
catch (java.net.ConnectException e)
{
// Host resolved but refused/failed the TCP connection (error code 1).
cleanupFailedClientConnection();
if (popupError)
MySwingUtils.showWrappedMessageDialog(Sage.rez("Network_Cannot_Connect_Error") + " (1)",
Sage.rez("Network_Error"), javax.swing.JOptionPane.ERROR_MESSAGE);
System.out.println("Error establishing server connection to " + serverName + " of:" + e);
e.printStackTrace();
return false;
}
catch (java.io.IOException e)
{
// The handshake can fail with sentinel message prefixes that identify
// specific rejection reasons; distinguish those from generic I/O errors.
cleanupFailedClientConnection();
if (e.getMessage() != null && e.getMessage().startsWith("BAD_LICENSE"))
{
// Server rejected this client as unauthorized.
if (popupError)
{
MySwingUtils.showWrappedMessageDialog(Sage.rez("Network_License_Error"),
Sage.rez("Network_Error"), javax.swing.JOptionPane.ERROR_MESSAGE);
}
connectionState = CS_STATE_UNAUTHORIZED_CLIENT;
SageTV.writeOutStateInfo(SageTV.SAGETV_CLIENT_UNAUTHORIZED, serverName, null);
}
else if (e.getMessage() != null && e.getMessage().startsWith("VERSION_ERR"))
{
// Client/server protocol versions don't match.
if (popupError)
{
MySwingUtils.showWrappedMessageDialog(Sage.rez("Network_Version_Error"),
Sage.rez("Network_Error"), javax.swing.JOptionPane.ERROR_MESSAGE);
}
connectionState = CS_STATE_VERSION_MISMATCH;
SageTV.writeOutStateInfo(SageTV.SAGETV_CLIENT_VERSION_MISMATCH, serverName, null);
}
else
{
// Generic I/O failure during handshake (error code 2).
if (popupError)
{
MySwingUtils.showWrappedMessageDialog(Sage.rez("Network_Cannot_Connect_Error") + " (2)",
Sage.rez("Network_Error"), javax.swing.JOptionPane.ERROR_MESSAGE);
}
}
System.out.println("Error establishing server connection to " + serverName + " of:" + e);
e.printStackTrace();
return false;
}
catch (Throwable e)
{
// Last-resort guard so an unexpected error can't leave a half-open link.
cleanupFailedClientConnection();
if (popupError)
MySwingUtils.showWrappedMessageDialog(Sage.rez("Network_Cannot_Connect_Error") + " (2)",
Sage.rez("Network_Error"), javax.swing.JOptionPane.ERROR_MESSAGE);
System.out.println("Error establishing server connection to " + serverName + " of:" + e);
e.printStackTrace();
return false;
}
finally
{
// Close the socket only when it wasn't handed off to a SageTVConnection.
if (sake != null)
{
try
{
sake.close();
}
catch (Exception e){}
sake = null;
}
}
return true;
}
/**
 * Looks up a named capability advertised by a connected client.
 *
 * @return the capability value, or null when the client is unknown or the
 *         capability isn't set
 */
public static String getClientCapability(String client, String capability) {
  NetworkClient conn = (NetworkClient) clientMap.get(client);
  return (conn == null) ? null : conn.capabilities.get(capability);
}
/**
 * Sets or clears a capability for a connected client. A null or empty value
 * removes the capability; unknown clients are ignored.
 */
public static void setClientCapability(String client, String capability, String value) {
  NetworkClient conn = (NetworkClient) clientMap.get(client);
  if (conn == null)
    return;
  boolean clearing = (value == null || value.length() == 0);
  if (clearing)
    conn.capabilities.remove(capability);
  else
    conn.capabilities.put(capability, value);
}
/**
 * Drops every capability recorded for the named client; no-op when the client
 * isn't in the connection map.
 */
public static void clearClientCapabilities(String client) {
  NetworkClient conn = (NetworkClient) clientMap.get(client);
  if (conn != null)
    conn.capabilities.clear();
}
  /**
   * Accepts a freshly-connected socket from a SageTV client. Each client establishes two
   * links: first a CLIENT_LISTENER connection (server pushes DB/property updates to the
   * client) and later a server-notifier connection (client issues requests to the server).
   * This method dispatches on which of the two just arrived and wires it into the
   * clientMap entry for that client name.
   *
   * NOTE(review): the static pendingPropXfers field captures property changes that race
   * with listener initialization; concurrent invocations of this method would stomp on
   * each other's pending list -- presumably only one accept runs at a time, confirm
   * against the accepting server loop.
   */
  public static void acceptFromClient(java.net.Socket s)
  {
    SageTVConnection stvconn = null;
    try
    {
      stvconn = new SageTVConnection(s);
      String clientName = stvconn.getClientName();
      if (stvconn.getLinkType() == SageTVConnection.CLIENT_LISTENER)
      {
        if (clientMap.containsKey(clientName))
        {
          // Huh? Why would this occur?
          // A stale entry with the same name; tear it down before replacing it.
          NetworkClient oldConn = (NetworkClient) clientMap.get(clientName);
          oldConn.fullCleanup();
          clientMap.remove(clientName);
        }
        // Before we do this; we need to check to see if another client has already connected and only done the first
        // part of its initialization. If so; we don't want to block it on completion of the second part.
        // Narflex - 5/14/2012 - I added a synchronized (clientMap) around this block of code because if you don't then if a property
        // change on the server occurs during the initializeListenerData process after that process has done the property sync,
        // but before the new NetworkClient gets put into the clientMap; then the properties on that client could end up being
        // out of sync w/ the server. That would explain the one time I saw this after enabling channels in the server but them
        // not appearing in the client...which I think was done as part of restarting both at once, so those ops could have
        // occurred at times as outlined here.
        // Narflex - 5/30/2012 - I had to remove the sync block because it could lockup other threads (like the Seeker) that were making calls
        // into here that required usage of the clientMap var. Now I track the property changes that we need to correct things and push them out
        // after we are done here.
        pendingPropXfers = new java.util.Vector();
        stvconn.initializeListenerData();
        NetworkClient neddy = new NetworkClient();
        neddy.clientListener = stvconn;
        neddy.clientName = stvconn.getClientName();
        clientMap.put(clientName, neddy);
        // Capture and clear the pending list, then replay any property changes that
        // arrived while the listener was initializing.
        java.util.Vector myPropXfers = pendingPropXfers;
        pendingPropXfers = null;
        if (!myPropXfers.isEmpty())
        {
          if (Sage.DBG) System.out.println("Sending property syncs that occurred during connection initiation...");
          stvconn.updateProperties((String[]) myPropXfers.toArray(Pooler.EMPTY_STRING_ARRAY));
        }
        // In case there's network encoders on the new client that just came up also
        SchedulerSelector.getInstance().kick(true);
      }
      else
      {
        NetworkClient currConn = (NetworkClient) clientMap.get(clientName);
        if (currConn == null)
        {
          // Try to just match it to any other connection that hasn't been completed yet
          if (!clientMap.isEmpty())
          {
            if (Sage.DBG) System.out.println("Couldn't match up client connection on specified ip:port; search manually...");
            java.util.Iterator walker = clientMap.values().iterator();
            while (walker.hasNext())
            {
              currConn = (NetworkClient) walker.next();
              // A half-initialized entry has a listener but no notifier yet; re-key it.
              if (currConn != null && currConn.serverNotifier == null)
              {
                currConn.clientListener.setClientName(clientName);
                walker.remove();
                clientMap.put(clientName, currConn);
                currConn.clientName = clientName;
                break;
              }
              currConn = null;
            }
          }
          if (currConn == null)
          {
            if (Sage.DBG) System.out.println("Cleaning up new connection because we can't match it up newName=" + clientName +
                " map=" + clientMap);
            stvconn.cleanup();
            return;
          }
        }
        // This is the server notifier, which is the final connection made. Now all of
        // the threads can be started and we can mark us as alive.
        currConn.serverNotifier = stvconn;
        currConn.alive = true;
        currConn.clientListener.setupKeepAlive();
        currConn.serverNotifier.setListenerMsgShare(currConn.clientListener.getListenerMsgShare());
        currConn.serverNotifier.startupMessagingThreads();
        currConn.clientIP = currConn.serverNotifier.getSocket().getInetAddress().getHostAddress();
        sage.plugin.PluginEventManager.postEvent(sage.plugin.PluginEventManager.CLIENT_CONNECTED,
            new Object[] { sage.plugin.PluginEventManager.VAR_IPADDRESS, currConn.clientIP,
            sage.plugin.PluginEventManager.VAR_MACADDRESS, null});
        if (!Sage.client)
        {
          // Flag clients on this same machine so server restarts can be propagated to them.
          String myLocalIP = "127.0.0.1";
          try
          {
            myLocalIP = java.net.InetAddress.getLocalHost().getHostAddress();
          }
          catch (Exception e){}
          if ("127.0.0.1".equals(currConn.clientIP) || myLocalIP.equals(currConn.clientIP))
          {
            currConn.localClient = true;
          }
        }
      }
    }
    catch (Throwable e)
    {
      System.out.println("Error w/SageTV client connection:" + e);
      e.printStackTrace(System.out);
      if (stvconn != null)
      {
        NetworkClient oldConn = (NetworkClient) clientMap.get(stvconn.getClientName());
        if (oldConn != null)
          oldConn.fullCleanup();
        else
        {
          // If we aren't registered in the clientMap yet, we still may need to remove ourselves as listeners here.
          Wizard.getInstance().removeXctListener(stvconn);
          Carny.getInstance().removeCarnyListener(stvconn);
        }
        stvconn.cleanup();
      }
    }
    finally
    {
      // Always clear the capture list so later property changes go straight to clients.
      pendingPropXfers = null;
    }
  }
public void clientIsActive()
{
alive = true;
if (serverNotifier != null)
serverNotifier.startupMessagingThreads();
if (clientListener != null)
{
Pooler.execute(clientListener, "ClientListener");
}
}
public void startCleanup()
{
alive = false;
if (clientListener != null)
{
synchronized (clientListener)
{
clientListener.cleanup();
clientListener = null;
}
}
}
public void finishCleanup()
{
if (serverNotifier != null)
{
synchronized (serverNotifier)
{
serverNotifier.cleanup();
serverNotifier = null;
}
}
}
// This should be called if there's a failure while we're connecting to the server
private void cleanupFailedClientConnection()
{
if (clientListener != null)
{
clientListener.cleanup();
clientListener = null;
}
if (serverNotifier != null)
{
serverNotifier.cleanup();
serverNotifier = null;
}
}
  /*
   * NOTE: Don't sync on the cleanup code. We've had deadlocks here and its silly to try
   * to sync lock when things are dying.
   */
  /**
   * Complete teardown of this client's state: fires the disconnect plugin event,
   * closes both connection links, unregisters the DB/Carny listeners (server side),
   * removes this entry from the clientMap and kicks the Seeker so it releases the
   * client's watched files.
   */
  public void fullCleanup()
  {
    if (Sage.DBG) System.out.println("NetworkClient fullCleanup " + clientName);
    if (alive && serverNotifier != null)
    {
      sage.plugin.PluginEventManager.postEvent(sage.plugin.PluginEventManager.CLIENT_DISCONNECTED,
          new Object[] { sage.plugin.PluginEventManager.VAR_IPADDRESS, clientIP,
          sage.plugin.PluginEventManager.VAR_MACADDRESS, null});
    }
    alive = false;
    if (Sage.client && serverNotifier != null)
    {
      // synchronized (serverNotifier)
      {
        // Release any files the UIs were watching through this connection.
        java.util.Iterator uiWalker = UIManager.getUIIterator();
        while (uiWalker.hasNext())
          SeekerSelector.getInstance().finishWatch((UIClient) uiWalker.next());
      }
    }
    if (clientListener != null)
    {
      // synchronized (clientListener)
      {
        if (!Sage.client)
        {
          Wizard.getInstance().removeXctListener(clientListener);
          Carny.getInstance().removeCarnyListener(clientListener);
        }
        clientListener.cleanup();
        clientListener = null;
      }
    }
    if (serverNotifier != null)
    {
      // synchronized (serverNotifier)
      {
        serverNotifier.cleanup();
        serverNotifier = null;
      }
    }
    if (!Sage.client)
    {
      // Ensure we are actually removing our own object from the map in case it somehow got replaced
      // with a new one of the same name (very possible for the clientID map)
      synchronized (clientMap)
      {
        if (clientMap.get(clientName) == this)
          clientMap.remove(clientName);
      }
      // Trigger cleanup of the watch file map
      SeekerSelector.getInstance().kick();
    }
  }
public static void distributeRecursivePropertyChange(String name)
{
if (clientMap.isEmpty() && pendingPropXfers == null)
return;
java.util.Properties allProps = Sage.getAllPrefs();
java.util.ArrayList xferProps = new java.util.ArrayList();
java.util.Enumeration propWalker = allProps.propertyNames();
while (propWalker.hasMoreElements())
{
String currKey = (String) propWalker.nextElement();
if (currKey.startsWith(name))
xferProps.add(currKey);
}
distributePropertyChanges((String[]) xferProps.toArray(Pooler.EMPTY_STRING_ARRAY));
}
public static void distributePropertyChange(String name)
{
if (clientMap.isEmpty() && pendingPropXfers == null)
return;
distributePropertyChanges(new String[] { name });
}
public static void distributePropertyChanges(String[] names)
{
java.util.Vector tempVec = pendingPropXfers;
if (tempVec != null)
{
tempVec.addAll(java.util.Arrays.asList(names));
}
if (clientMap.isEmpty())
return;
java.util.ArrayList currClients;
synchronized (clientMap)
{
currClients = new java.util.ArrayList(clientMap.values());
}
SageTV.incrementQuanta();
for (int i = 0; i < currClients.size(); i++)
{
NetworkClient neddy = (NetworkClient) currClients.get(i);
SageTVConnection listy = neddy.clientListener;
if (listy != null)
{
// We can't hold this lock because if the call fails we'll have the connection lock during
// cleanup which can cause a deadlock
//synchronized (listy)
{
if (neddy.alive)
{
listy.updateProperties(names);
}
}
}
}
}
public static void distributeInactiveFile(String filename)
{
if (clientMap.isEmpty())
return;
java.util.ArrayList currClients;
synchronized (clientMap)
{
currClients = new java.util.ArrayList(clientMap.values());
}
for (int i = 0; i < currClients.size(); i++)
{
NetworkClient neddy = (NetworkClient) currClients.get(i);
SageTVConnection listy = neddy.serverNotifier;
if (listy != null)
{
// We can't hold this lock because if the call fails we'll have the connection lock during
// cleanup which can cause a deadlock
//synchronized (listy)
{
if (neddy.alive)
{
listy.inactiveFile(filename);
}
}
}
}
}
public static void distributeScheduleChangedAsync()
{
if (!clientMap.isEmpty())
{
Pooler.execute(new Runnable()
{
public void run()
{
java.util.ArrayList currClients;
synchronized (clientMap)
{
currClients = new java.util.ArrayList(clientMap.values());
}
for (int i = 0; i < currClients.size(); i++)
{
NetworkClient neddy = (NetworkClient) currClients.get(i);
SageTVConnection listy = neddy.serverNotifier;
if (listy != null)
{
// We can't hold this lock because if the call fails we'll have the connection lock during
// cleanup which can cause a deadlock
//synchronized (listy)
{
if (neddy.alive)
{
listy.scheduleChanged();
}
}
}
}
}
}, "AsyncSchedChangeDistru");
}
}
public static boolean isCSConnectionRestoring() { return Sage.client && SageTV.neddy != null && SageTV.neddy.restoring; }
public static SageTVConnection getSN() { return SageTV.neddy.serverNotifier; }
public static SageTVConnection getCL() { return SageTV.neddy.clientListener; }
  // This is what's used when the client has an error in communicating with the server.
  // At this point, we'll just reset the whole thing and start over, but at least
  // the app won't hang (because that's just bad)
  // True while restoreClientServerConnection() is rebuilding the server link.
  private boolean restoring = false;
  // Guards reads/writes of the restoring flag.
  private final Object restoreLock = new Object();
  /**
   * Client-side recovery path invoked after a communication failure: tears down the
   * dead server link, reconnects with retry/backoff (rediscovering the server's IP
   * when it was autodiscovered), resyncs EPG/capture state, and finally restores
   * every UI to the menu and media playback position it had before the outage.
   * Never runs on the AWT dispatch thread; re-posts itself to a pooled thread.
   */
  private void restoreClientServerConnection()
  {
    // If we try to restore this as part of the restart activity it causes lots of issues; so don't do it
    if (!SageTV.isAlive())
      return;
    synchronized (restoreLock)
    {
      if (restoring || !alive) return; // protect against circularities
    }
    if (java.awt.EventQueue.isDispatchThread())
    {
      Pooler.execute(new Runnable(){public void run(){ restoreClientServerConnection(); } });
      return;
    }
    synchronized (restoreLock)
    {
      // Re-check under the lock; only one thread may perform the restore.
      if (restoring || !alive) return; // protect against circularities
      restoring = true;
    }
    connectionState = CS_STATE_NOT_INITIALIZED;
    if (Sage.DBG) System.out.println("Setting c/s connection state to: NOT_INITIALIZED");
    // Snapshot each UI's state (current menu, playing file, media time, play/pause)
    // so it can be restored once the reconnect completes.
    java.util.Map uiClientMenuMap = new java.util.HashMap();
    java.util.Map uiClientMFMap = new java.util.HashMap();
    java.util.Map uiClientMediaTimeMap = new java.util.HashMap();
    java.util.Map uiClientPlayMap = new java.util.HashMap();
    java.util.Iterator walker = UIManager.getUIIterator();
    while (walker.hasNext())
    {
      UIManager theUI = (UIManager) walker.next();
      uiClientMenuMap.put(theUI, theUI.getCurrUI());
      // Be sure we can reload the state of the media player as well
      MediaFile currMF = theUI.getVideoFrame().getCurrFile();
      if (currMF != null)
      {
        uiClientMFMap.put(theUI, currMF);
        uiClientMediaTimeMap.put(theUI, new Long(theUI.getVideoFrame().getMediaTimeMillis(true)));
        uiClientPlayMap.put(theUI, new Boolean(theUI.getVideoFrame().isPlayin()));
      }
      theUI.advanceUI("Server Connection Lost");
    }
    if (Sage.DBG) System.out.println("Client/Server connection has been lost. Cleaning up and then attempting to restore...");
    startCleanup();
    // For Carny, we just need to redo the sync to put it back where it should be
    // Seeker doesn't do anything on the client, but it does call VF.goodbye which closes the current file
    VideoFrame.closeAndWaitAll();
    EPG.getInstance().goodbye();
    finishCleanup();
    // Now we're totally disconnected from the server and can start the rebuilding...
    int numReconnects = 0;
    long sleepThisTime = 15000;
    while (!connect(Sage.preferredServer, false))
    {
      // Exponential backoff between attempts; resets to 7.5s after every 5 failures.
      numReconnects++;
      if (numReconnects > 5)
      {
        numReconnects = 0;
        sleepThisTime = 7500;
      }
      try{Thread.sleep(sleepThisTime);}catch(Exception e){}
      sleepThisTime *= 2;
      if (Sage.autodiscoveredServer)
      {
        // The IP of the server may have changed...so rediscover it
        ServerInfo[] currServers = discoverServers(5000);
        if (currServers.length > 0 && !Sage.preferredServer.equals(currServers[0].address) && currServers[0].ready)
        {
          if (Sage.DBG) System.out.println("Redid discovery of SageTV server oldIP=" + Sage.preferredServer + " newIP=" + currServers[0].address);
          Sage.preferredServer = currServers[0].address;
        }
        if (currServers.length > 0)
        {
          if (currServers[0].ready)
          {
            // We moved this up into connect so it doesn't flip/flop between states if there's a version/license error
          }
          else
          {
            connectionState = CS_STATE_SERVER_LOADING;
            if (Sage.DBG) System.out.println("Setting c/s connection state to: SERVER_LOADING");
            SageTV.writeOutStateInfo(SageTV.SAGETV_CLIENT_SERVER_LOADING, currServers[0].address, null);
          }
        }
        else
        {
          connectionState = CS_STATE_SERVER_NOT_FOUND;
          if (Sage.DBG) System.out.println("Setting c/s connection state to: SERVER_NOT_FOUND");
          SageTV.writeOutStateInfo(SageTV.SAGETV_CLIENT_SERVER_NOT_FOUND, null, null);
        }
      }
    }
    connectionState = CS_STATE_FULLY_CONNECTED;
    if (Sage.DBG) System.out.println("Setting c/s connection state to: FULLY_CONNECTED");
    SageTV.writeOutStateInfo(SageTV.SAGETV_CLIENT_CONNECTED, Sage.preferredServer, null);
    // After rebuilding, we need to call this
    EPG.getInstance().resyncToProperties(true, true);
    CaptureDeviceManager[] cdms = MMC.getInstance().getCaptureDeviceManagers();
    for (int i = 0; i < cdms.length; i++)
      if (cdms[i] instanceof ClientCaptureManager)
        ((ClientCaptureManager) cdms[i]).resyncToProperties(true);
    clientIsActive();
    synchronized (restoreLock)
    {
      restoring = false;
    }
    // Refresh the current UI for everyone that's still connected
    walker = UIManager.getUIIterator();
    while (walker.hasNext())
    {
      UIManager theUI = (UIManager) walker.next();
      PseudoMenu startUI = (PseudoMenu) uiClientMenuMap.get(theUI);
      if (startUI != null)
      {
        // Reload any file that was playing and seek back to the appropriate time and set the playstate too
        // Do this before we go back to the proper UI menu so video doesn't auto-spawn itself or we kick ourselves out of
        // the OSD menu because we detect nothing is playing.
        MediaFile theMF = (MediaFile) uiClientMFMap.get(theUI);
        // We should not re-use this object reference, we have reloaded the DB from the server so get the new proper reference
        if (theMF != null)
          theMF = Wizard.getInstance().getFileForID(theMF.getID());
        if (theMF != null)
        {
          theUI.getVideoFrame().watch(theMF);
          theUI.getVideoFrame().timeJump(((Long) uiClientMediaTimeMap.get(theUI)).longValue());
          if (((Boolean) uiClientPlayMap.get(theUI)).booleanValue())
            theUI.getVideoFrame().play();
          else
            theUI.getVideoFrame().pause();
        }
        // The screensaver resets the UI history so you can't backup into it
        if (theUI.canBackupUI())
          theUI.backupUI();
        else
          theUI.advanceUI(startUI);
        // Make sure we didn't end up on the 'Server Connection Lost' screen again...and if so, just go to the main menu
        PseudoMenu newUI = theUI.getCurrUI();
        if (newUI != null && newUI.getBlueprint().getName().equals("Server Connection Lost"))
        {
          if (Sage.DBG) System.out.println("Recovery landed on Server Connection Lost...force us to the main menu");
          theUI.advanceUI(UIManager.MAIN_MENU);
        }
      }
      else
        theUI.freshStartup();
      /* We're up and running; broadcast our capabilities to the server */
      if (Sage.DBG) System.out.println("Sending " + theUI.getCapabilities().size()
          + " capabilities: " + theUI.getCapabilities());
      getSN().sendCapabilties(theUI, theUI.getCapabilities().entrySet());
    }
  }
public static SageTVConnection getNetworkHookConnection(String clientName)
{
NetworkClient neddy = (NetworkClient) clientMap.get(clientName);
if (neddy == null)
return null;
else
return neddy.serverNotifier;
}
public static void distributeHook(String hookName, Object[] hookVars)
{
java.util.ArrayList currClients;
synchronized (clientMap)
{
currClients = new java.util.ArrayList(clientMap.values());
}
for (int i = 0; i < currClients.size(); i++)
{
NetworkClient neddy = (NetworkClient) currClients.get(i);
SageTVConnection listy = neddy.serverNotifier;
if (listy != null)
{
// We can't hold this lock because if the call fails we'll have the connection lock during
// cleanup which can cause a deadlock
//synchronized (listy)
{
if (neddy.alive)
{
listy.sendHook(null, hookName, hookVars);
}
}
}
}
}
public static void sendRestartToLocalClients()
{
java.util.ArrayList currClients;
synchronized (clientMap)
{
if (clientMap.isEmpty()) return;
currClients = new java.util.ArrayList(clientMap.values());
}
boolean sentRestarts = false;
for (int i = 0; i < currClients.size(); i++)
{
NetworkClient neddy = (NetworkClient) currClients.get(i);
if (neddy != null && neddy.localClient)
{
neddy.serverNotifier.sendRestart();
sentRestarts = true;
}
}
if (sentRestarts)
{
// Wait for a second here to ensure that the messages actually got sent out
try{Thread.sleep(500);}catch(Exception e){}
}
}
public static boolean areNonLocalClientsConnected()
{
java.util.ArrayList currClients;
synchronized (clientMap)
{
if (clientMap.isEmpty()) return false;
currClients = new java.util.ArrayList(clientMap.values());
}
for (int i = 0; i < currClients.size(); i++)
{
NetworkClient neddy = (NetworkClient) currClients.get(i);
if (neddy != null && !neddy.localClient)
return true;
}
return false;
}
public static void communicationFailure(SageTVConnection failedConn)
{
if (Sage.DBG && failedConn != null) System.out.println("NetworkManager CommunicationFailure : " + failedConn.getClientName() + " type=" + failedConn.getLinkType());
if (Sage.client)
{
if (SageTV.neddy != null && (failedConn == SageTV.neddy.clientListener || failedConn == SageTV.neddy.serverNotifier))
SageTV.neddy.restoreClientServerConnection();
}
else
{
NetworkClient neddy = (NetworkClient) clientMap.get(failedConn.getClientName());
if (neddy != null)
neddy.fullCleanup();
}
}
public static void killAll()
{
java.util.Iterator walker = (new java.util.ArrayList(clientMap.values())).iterator();
while (walker.hasNext())
{
((NetworkClient) walker.next()).fullCleanup();
}
clientMap.clear();
clientIDMap.clear();
}
public static String[] getConnectedClients()
{
return (String[]) clientMap.keySet().toArray(Pooler.EMPTY_STRING_ARRAY);
}
public static String[] getConnectedClientIDs()
{
return (String[]) clientIDMap.keySet().toArray(Pooler.EMPTY_STRING_ARRAY);
}
public static boolean isConnectedClientContext(String context)
{
return clientMap.containsKey(context);
}
  /**
   * Broadcasts a UDP discovery probe and collects replies from SageTV servers on the
   * subnet until the timeout elapses.
   *
   * Probe format: "STV" magic, 3 version bytes, padded to 32 bytes, sent to the
   * broadcast address on the discovery port. Replies of at least 9 bytes carry the
   * server version, port (2 bytes), and a length-prefixed display name.
   *
   * NOTE(review): si.ready is never assigned in this method, so discovered entries
   * report ready == false here -- presumably it is filled in elsewhere (callers such
   * as restoreClientServerConnection test it); confirm before relying on it.
   * NOTE(review): the DatagramPacket is reused across receives; after the first
   * receive its length shrinks to that reply's size, which can truncate later,
   * longer replies -- confirm whether this matters for the expected reply sizes.
   *
   * @param discoveryTimeout total time in milliseconds to wait for replies
   * @return the servers that replied within the timeout (possibly empty, never null)
   */
  public static ServerInfo[] discoverServers(int discoveryTimeout)
  {
    java.util.ArrayList servers = new java.util.ArrayList();
    if (Sage.DBG) System.out.println("Sending out discovery packets to find SageTV Servers...");
    java.net.DatagramSocket sock = null;
    try
    {
      // Try on the encoder discovery port which is less likely to be in use
      try
      {
        sock = new java.net.DatagramSocket(Sage.getInt("encoding_discovery_port", 8271));
      }
      catch (java.net.BindException be2)
      {
        // Just create it wherever
        sock = new java.net.DatagramSocket();
      }
      java.net.DatagramPacket pack = new java.net.DatagramPacket(new byte[512], 512);
      byte[] data = pack.getData();
      data[0] = 'S';
      data[1] = 'T';
      data[2] = 'V';
      data[3] = Version.MAJOR_VERSION;
      data[4] = Version.MINOR_VERSION;
      data[5] = Version.MICRO_VERSION;
      pack.setLength(32);
      sock.setBroadcast(true);
      // Find the broadcast address for this subnet.
      // String myIP = SageTV.api("GetLocalIPAddress", new Object[0]).toString();
      // int lastIdx = myIP.lastIndexOf('.');
      // myIP = myIP.substring(0, lastIdx) + ".255";
      pack.setAddress(java.net.InetAddress.getByName("255.255.255.255"));
      pack.setPort(Sage.getInt("discovery_port", 8270));
      sock.send(pack);
      long startTime = Sage.eventTime();
      do
      {
        // Shrink the socket timeout so the whole loop respects discoveryTimeout;
        // the terminating SocketTimeoutException is swallowed by the catch below.
        int currTimeout = (int)Math.max(1, (startTime + discoveryTimeout) - Sage.eventTime());
        sock.setSoTimeout(currTimeout);
        sock.receive(pack);
        if (pack.getLength() >= 9)
        {
          if (Sage.DBG) System.out.println("Discovery packet received:" + pack + " from " + pack.getSocketAddress());
          ServerInfo si = new ServerInfo();
          if (data[0] == 'S' && data[1] == 'T' && data[2] == 'V')
          {
            // Check version
            si.majorVer = data[3];
            si.minorVer = data[4];
            si.buildVer = data[5];
            if (Sage.DBG) System.out.println("Server info " + si.majorVer + "." + si.minorVer + "." + si.buildVer);
            if (si.majorVer > Sage.CLIENT_COMPATIBLE_MAJOR_VERSION || (si.majorVer == Sage.CLIENT_COMPATIBLE_MAJOR_VERSION &&
                (si.minorVer > Sage.CLIENT_COMPATIBLE_MINOR_VERSION || (si.minorVer == Sage.CLIENT_COMPATIBLE_MINOR_VERSION &&
                si.buildVer >= Sage.CLIENT_COMPATIBLE_MICRO_VERSION))))
            {
              si.port = ((data[6] & 0xFF) << 8) + (data[7] & 0xFF);
              int descLength = (data[8] & 0xFF);
              si.name = new String(data, 9, descLength, Sage.I18N_CHARSET);
              si.address = pack.getAddress().getHostAddress();
              if (Sage.DBG) System.out.println("Added server info:" + si);
              servers.add(si);
            }
          }
        }
      } while (true);//startTime + discoveryTimeout > Sage.eventTime());
    }
    catch (Exception e)
    {
      // Expected termination path: SocketTimeoutException ends the receive loop.
      // System.out.println("Error discovering servers:" + e);
    }
    finally
    {
      if (sock != null)
      {
        try
        {
          sock.close();
        }catch (Exception e){}
        sock = null;
      }
    }
    return (ServerInfo[]) servers.toArray(new ServerInfo[0]);
  }
public static Object clientEvaluateAction(String uiContext, String methodName, Object[] methodArgs) throws Exception
{
return clientEvaluateAction(uiContext, null, methodName, methodArgs);
}
public static String translateClientNameForID(String clientId) {
return clientId;
}
public static Object clientEvaluateAction(String uiContext, String uiController, String methodName, Object[] methodArgs) throws Exception
{
NetworkClient neddy = (NetworkClient) clientMap.get(uiContext);
if (neddy == null)
{
return null;
}
SageTVConnection listy = neddy.serverNotifier;
if (listy != null)
{
// We can't hold this lock because if the call fails we'll have the connection lock during
// cleanup which can cause a deadlock
//synchronized (listy)
{
if (neddy.alive)
{
return listy.requestAction(methodName, methodArgs);
}
}
}
return null;
}
public int getConnectionState()
{
return connectionState;
}
  private long createTime;
  // Link the server uses to push DB/property updates to this client; first connection made.
  private SageTVConnection clientListener;
  // Link the client uses to issue requests to the server; second connection made.
  private SageTVConnection serverNotifier;
  // True once both links are up and the messaging threads are running.
  private boolean alive;
  private String clientName;
  private String clientIP;
  private String clientID; // for embedded only
  // True when the client runs on the same machine as the server (see acceptFromClient).
  private boolean localClient;
  // All connected clients keyed by client name.
  private static final java.util.Map clientMap = java.util.Collections.synchronizedMap(new java.util.HashMap());
  // Clients keyed by client ID. NOTE(review): unlike clientMap this is not a
  // synchronized map -- confirm it is only touched from a single thread.
  private static java.util.Map clientIDMap = new java.util.HashMap();
  // Non-null only while acceptFromClient() initializes a listener; collects property
  // changes that occur mid-handshake so they can be replayed afterwards.
  private static java.util.Vector pendingPropXfers = null;
  private int connectionState = CS_STATE_NOT_INITIALIZED;
  /* Track the optional capabilities of the clients to dynamically alter the core */
  private Map<String, String> capabilities = new HashMap<String, String>();
public static class ServerInfo
{
public byte majorVer;
public byte minorVer;
public byte buildVer;
public int port;
public String address;
public String name;
public boolean ready;
public String toString()
{
return name + ",V" + majorVer + "." + minorVer + "." + buildVer + "," + address;
}
}
}
|
googleapis/google-cloud-java | 34,808 | java-discoveryengine/proto-google-cloud-discoveryengine-v1alpha/src/main/java/com/google/cloud/discoveryengine/v1alpha/GetUriPatternDocumentDataResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/discoveryengine/v1alpha/site_search_engine_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.discoveryengine.v1alpha;
/**
*
*
* <pre>
* Response message for
* [SiteSearchEngineService.GetUriPatternDocumentData][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.GetUriPatternDocumentData]
* method.
* </pre>
*
* Protobuf type {@code google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse}
*/
public final class GetUriPatternDocumentDataResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse)
GetUriPatternDocumentDataResponseOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use GetUriPatternDocumentDataResponse.newBuilder() to construct.
  private GetUriPatternDocumentDataResponse(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Constructs the default (empty) message; used by newInstance() below.
  private GetUriPatternDocumentDataResponse() {}

  // Runtime hook for allocating fresh instances during parsing (generated code).
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new GetUriPatternDocumentDataResponse();
  }
  // Generated reflection plumbing: descriptor and accessor-table lookups for this message.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.discoveryengine.v1alpha.SiteSearchEngineServiceProto
        .internal_static_google_cloud_discoveryengine_v1alpha_GetUriPatternDocumentDataResponse_descriptor;
  }

  // Routes reflective map-field access; field number 1 is document_data_map.
  @SuppressWarnings({"rawtypes"})
  @java.lang.Override
  protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection(
      int number) {
    switch (number) {
      case 1:
        return internalGetDocumentDataMap();
      default:
        throw new RuntimeException("Invalid map field number: " + number);
    }
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.discoveryengine.v1alpha.SiteSearchEngineServiceProto
        .internal_static_google_cloud_discoveryengine_v1alpha_GetUriPatternDocumentDataResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse.class,
            com.google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse.Builder
                .class);
  }
  public static final int DOCUMENT_DATA_MAP_FIELD_NUMBER = 1;

  // Default map entry (string key -> Struct value) used by the protobuf runtime
  // to (de)serialize the document_data_map field.
  private static final class DocumentDataMapDefaultEntryHolder {
    static final com.google.protobuf.MapEntry<java.lang.String, com.google.protobuf.Struct>
        defaultEntry =
            com.google.protobuf.MapEntry
                .<java.lang.String, com.google.protobuf.Struct>newDefaultInstance(
                    com.google.cloud.discoveryengine.v1alpha.SiteSearchEngineServiceProto
                        .internal_static_google_cloud_discoveryengine_v1alpha_GetUriPatternDocumentDataResponse_DocumentDataMapEntry_descriptor,
                    com.google.protobuf.WireFormat.FieldType.STRING,
                    "",
                    com.google.protobuf.WireFormat.FieldType.MESSAGE,
                    com.google.protobuf.Struct.getDefaultInstance());
  }

  @SuppressWarnings("serial")
  private com.google.protobuf.MapField<java.lang.String, com.google.protobuf.Struct>
      documentDataMap_;

  // Returns the backing MapField, substituting an immutable empty map when unset.
  private com.google.protobuf.MapField<java.lang.String, com.google.protobuf.Struct>
      internalGetDocumentDataMap() {
    if (documentDataMap_ == null) {
      return com.google.protobuf.MapField.emptyMapField(
          DocumentDataMapDefaultEntryHolder.defaultEntry);
    }
    return documentDataMap_;
  }
  // Number of entries in the document_data_map field.
  public int getDocumentDataMapCount() {
    return internalGetDocumentDataMap().getMap().size();
  }

  /**
   *
   *
   * <pre>
   * Document data keyed by URI pattern. For example:
   * document_data_map = {
   * "www.url1.com/*": {
   * "Categories": ["category1", "category2"]
   * },
   * "www.url2.com/*": {
   * "Categories": ["category3"]
   * }
   * }
   * </pre>
   *
   * <code>map<string, .google.protobuf.Struct> document_data_map = 1;</code>
   */
  @java.lang.Override
  public boolean containsDocumentDataMap(java.lang.String key) {
    if (key == null) {
      throw new NullPointerException("map key");
    }
    return internalGetDocumentDataMap().getMap().containsKey(key);
  }

  /** Use {@link #getDocumentDataMapMap()} instead. */
  @java.lang.Override
  @java.lang.Deprecated
  public java.util.Map<java.lang.String, com.google.protobuf.Struct> getDocumentDataMap() {
    return getDocumentDataMapMap();
  }
  /**
   *
   *
   * <pre>
   * Document data keyed by URI pattern. For example:
   * document_data_map = {
   * "www.url1.com/*": {
   * "Categories": ["category1", "category2"]
   * },
   * "www.url2.com/*": {
   * "Categories": ["category3"]
   * }
   * }
   * </pre>
   *
   * <code>map<string, .google.protobuf.Struct> document_data_map = 1;</code>
   */
  @java.lang.Override
  public java.util.Map<java.lang.String, com.google.protobuf.Struct> getDocumentDataMapMap() {
    return internalGetDocumentDataMap().getMap();
  }

  /**
   *
   *
   * <pre>
   * Document data keyed by URI pattern. For example:
   * document_data_map = {
   * "www.url1.com/*": {
   * "Categories": ["category1", "category2"]
   * },
   * "www.url2.com/*": {
   * "Categories": ["category3"]
   * }
   * }
   * </pre>
   *
   * <code>map<string, .google.protobuf.Struct> document_data_map = 1;</code>
   */
  @java.lang.Override
  public /* nullable */ com.google.protobuf.Struct getDocumentDataMapOrDefault(
      java.lang.String key,
      /* nullable */
      com.google.protobuf.Struct defaultValue) {
    if (key == null) {
      throw new NullPointerException("map key");
    }
    java.util.Map<java.lang.String, com.google.protobuf.Struct> map =
        internalGetDocumentDataMap().getMap();
    return map.containsKey(key) ? map.get(key) : defaultValue;
  }

  /**
   *
   *
   * <pre>
   * Document data keyed by URI pattern. For example:
   * document_data_map = {
   * "www.url1.com/*": {
   * "Categories": ["category1", "category2"]
   * },
   * "www.url2.com/*": {
   * "Categories": ["category3"]
   * }
   * }
   * </pre>
   *
   * <code>map<string, .google.protobuf.Struct> document_data_map = 1;</code>
   */
  @java.lang.Override
  public com.google.protobuf.Struct getDocumentDataMapOrThrow(java.lang.String key) {
    if (key == null) {
      throw new NullPointerException("map key");
    }
    java.util.Map<java.lang.String, com.google.protobuf.Struct> map =
        internalGetDocumentDataMap().getMap();
    if (!map.containsKey(key)) {
      throw new java.lang.IllegalArgumentException();
    }
    return map.get(key);
  }
  // Memoized initialization check: -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes the map field (field 1) followed by any unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    com.google.protobuf.GeneratedMessageV3.serializeStringMapTo(
        output, internalGetDocumentDataMap(), DocumentDataMapDefaultEntryHolder.defaultEntry, 1);
    getUnknownFields().writeTo(output);
  }
  // Computes (and memoizes) the serialized byte size of this message.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (java.util.Map.Entry<java.lang.String, com.google.protobuf.Struct> entry :
        internalGetDocumentDataMap().getMap().entrySet()) {
      com.google.protobuf.MapEntry<java.lang.String, com.google.protobuf.Struct> documentDataMap__ =
          DocumentDataMapDefaultEntryHolder.defaultEntry
              .newBuilderForType()
              .setKey(entry.getKey())
              .setValue(entry.getValue())
              .build();
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, documentDataMap__);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Structural equality over the map field and unknown fields (generated contract).
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj
        instanceof com.google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse other =
        (com.google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse) obj;
    if (!internalGetDocumentDataMap().equals(other.internalGetDocumentDataMap())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Memoized hash consistent with equals(): mixes the descriptor, the map field
  // (only when non-empty) and the unknown fields.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (!internalGetDocumentDataMap().getMap().isEmpty()) {
      hash = (37 * hash) + DOCUMENT_DATA_MAP_FIELD_NUMBER;
      hash = (53 * hash) + internalGetDocumentDataMap().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parseFrom overloads, all delegating to PARSER or the
  // GeneratedMessageV3 stream helpers.
  public static com.google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse
      parseFrom(java.nio.ByteBuffer data)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse
      parseFrom(
          java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse
      parseFrom(com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse
      parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse
      parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse
      parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse
      parseFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse
      parseFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
public static com.google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse
parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse
parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
// Creates a fresh Builder seeded from the shared (empty) default instance.
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Creates a Builder pre-populated with the contents of {@code prototype}.
public static Builder newBuilder(
com.google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// Skip the mergeFrom when this is the default instance (nothing to copy).
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for
* [SiteSearchEngineService.GetUriPatternDocumentData][google.cloud.discoveryengine.v1alpha.SiteSearchEngineService.GetUriPatternDocumentData]
* method.
* </pre>
*
* Protobuf type {@code google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse}
*/
// Generated builder for GetUriPatternDocumentDataResponse. The message has a
// single field, map<string, google.protobuf.Struct> document_data_map = 1,
// backed by a MapFieldBuilder that can hold either built Structs or
// Struct.Builders until build() time. Hand edits are normally overwritten by
// protoc regeneration.
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse)
com.google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.discoveryengine.v1alpha.SiteSearchEngineServiceProto
.internal_static_google_cloud_discoveryengine_v1alpha_GetUriPatternDocumentDataResponse_descriptor;
}
// Reflection hook for map fields; only field number 1 is a map here.
@SuppressWarnings({"rawtypes"})
protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection(
int number) {
switch (number) {
case 1:
return internalGetDocumentDataMap();
default:
throw new RuntimeException("Invalid map field number: " + number);
}
}
// Mutable counterpart of the reflection hook above.
@SuppressWarnings({"rawtypes"})
protected com.google.protobuf.MapFieldReflectionAccessor internalGetMutableMapFieldReflection(
int number) {
switch (number) {
case 1:
return internalGetMutableDocumentDataMap();
default:
throw new RuntimeException("Invalid map field number: " + number);
}
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.discoveryengine.v1alpha.SiteSearchEngineServiceProto
.internal_static_google_cloud_discoveryengine_v1alpha_GetUriPatternDocumentDataResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse.class,
com.google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse.Builder
.class);
}
// Construct using
// com.google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
internalGetMutableDocumentDataMap().clear();
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.discoveryengine.v1alpha.SiteSearchEngineServiceProto
.internal_static_google_cloud_discoveryengine_v1alpha_GetUriPatternDocumentDataResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse
getDefaultInstanceForType() {
return com.google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse
.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse build() {
com.google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse result =
buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse
buildPartial() {
com.google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse result =
new com.google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Copies the map field into the result when its has-bit (0x1) is set.
private void buildPartial0(
com.google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.documentDataMap_ =
internalGetDocumentDataMap().build(DocumentDataMapDefaultEntryHolder.defaultEntry);
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other
instanceof com.google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse) {
return mergeFrom(
(com.google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Typed merge: entries from {@code other} overwrite entries with the same key.
public Builder mergeFrom(
com.google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse other) {
if (other
== com.google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse
.getDefaultInstance()) return this;
internalGetMutableDocumentDataMap().mergeFrom(other.internalGetDocumentDataMap());
bitField0_ |= 0x00000001;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
// The message declares no required fields, so it is always initialized.
return true;
}
// Wire-format parse loop: tag 10 (field 1, length-delimited) carries one map
// entry per occurrence; any other tag is preserved as an unknown field.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.protobuf.MapEntry<java.lang.String, com.google.protobuf.Struct>
documentDataMap__ =
input.readMessage(
DocumentDataMapDefaultEntryHolder.defaultEntry.getParserForType(),
extensionRegistry);
internalGetMutableDocumentDataMap()
.ensureBuilderMap()
.put(documentDataMap__.getKey(), documentDataMap__.getValue());
bitField0_ |= 0x00000001;
break;
} // case 10
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
// Converts stored map values (built Struct or pending Struct.Builder) into
// built Structs when the message is assembled.
private static final class DocumentDataMapConverter
implements com.google.protobuf.MapFieldBuilder.Converter<
java.lang.String, com.google.protobuf.StructOrBuilder, com.google.protobuf.Struct> {
@java.lang.Override
public com.google.protobuf.Struct build(com.google.protobuf.StructOrBuilder val) {
if (val instanceof com.google.protobuf.Struct) {
return (com.google.protobuf.Struct) val;
}
return ((com.google.protobuf.Struct.Builder) val).build();
}
@java.lang.Override
public com.google.protobuf.MapEntry<java.lang.String, com.google.protobuf.Struct>
defaultEntry() {
return DocumentDataMapDefaultEntryHolder.defaultEntry;
}
}
;
private static final DocumentDataMapConverter documentDataMapConverter =
new DocumentDataMapConverter();
private com.google.protobuf.MapFieldBuilder<
java.lang.String,
com.google.protobuf.StructOrBuilder,
com.google.protobuf.Struct,
com.google.protobuf.Struct.Builder>
documentDataMap_;
// Read-only accessor: returns a throwaway empty builder when the field is unset.
private com.google.protobuf.MapFieldBuilder<
java.lang.String,
com.google.protobuf.StructOrBuilder,
com.google.protobuf.Struct,
com.google.protobuf.Struct.Builder>
internalGetDocumentDataMap() {
if (documentDataMap_ == null) {
return new com.google.protobuf.MapFieldBuilder<>(documentDataMapConverter);
}
return documentDataMap_;
}
// Mutable accessor: lazily creates the backing builder, sets the has-bit,
// and notifies any parent builder of the change.
private com.google.protobuf.MapFieldBuilder<
java.lang.String,
com.google.protobuf.StructOrBuilder,
com.google.protobuf.Struct,
com.google.protobuf.Struct.Builder>
internalGetMutableDocumentDataMap() {
if (documentDataMap_ == null) {
documentDataMap_ = new com.google.protobuf.MapFieldBuilder<>(documentDataMapConverter);
}
bitField0_ |= 0x00000001;
onChanged();
return documentDataMap_;
}
public int getDocumentDataMapCount() {
return internalGetDocumentDataMap().ensureBuilderMap().size();
}
/**
 *
 *
 * <pre>
 * Document data keyed by URI pattern. For example:
 * document_data_map = {
 * "www.url1.com/*": {
 * "Categories": ["category1", "category2"]
 * },
 * "www.url2.com/*": {
 * "Categories": ["category3"]
 * }
 * }
 * </pre>
 *
 * <code>map<string, .google.protobuf.Struct> document_data_map = 1;</code>
 */
@java.lang.Override
public boolean containsDocumentDataMap(java.lang.String key) {
if (key == null) {
throw new NullPointerException("map key");
}
return internalGetDocumentDataMap().ensureBuilderMap().containsKey(key);
}
/** Use {@link #getDocumentDataMapMap()} instead. */
@java.lang.Override
@java.lang.Deprecated
public java.util.Map<java.lang.String, com.google.protobuf.Struct> getDocumentDataMap() {
return getDocumentDataMapMap();
}
/**
 *
 *
 * <pre>
 * Document data keyed by URI pattern. For example:
 * document_data_map = {
 * "www.url1.com/*": {
 * "Categories": ["category1", "category2"]
 * },
 * "www.url2.com/*": {
 * "Categories": ["category3"]
 * }
 * }
 * </pre>
 *
 * <code>map<string, .google.protobuf.Struct> document_data_map = 1;</code>
 */
@java.lang.Override
public java.util.Map<java.lang.String, com.google.protobuf.Struct> getDocumentDataMapMap() {
return internalGetDocumentDataMap().getImmutableMap();
}
/**
 *
 *
 * <pre>
 * Document data keyed by URI pattern. For example:
 * document_data_map = {
 * "www.url1.com/*": {
 * "Categories": ["category1", "category2"]
 * },
 * "www.url2.com/*": {
 * "Categories": ["category3"]
 * }
 * }
 * </pre>
 *
 * <code>map<string, .google.protobuf.Struct> document_data_map = 1;</code>
 */
@java.lang.Override
public /* nullable */ com.google.protobuf.Struct getDocumentDataMapOrDefault(
java.lang.String key,
/* nullable */
com.google.protobuf.Struct defaultValue) {
if (key == null) {
throw new NullPointerException("map key");
}
java.util.Map<java.lang.String, com.google.protobuf.StructOrBuilder> map =
internalGetMutableDocumentDataMap().ensureBuilderMap();
return map.containsKey(key) ? documentDataMapConverter.build(map.get(key)) : defaultValue;
}
/**
 *
 *
 * <pre>
 * Document data keyed by URI pattern. For example:
 * document_data_map = {
 * "www.url1.com/*": {
 * "Categories": ["category1", "category2"]
 * },
 * "www.url2.com/*": {
 * "Categories": ["category3"]
 * }
 * }
 * </pre>
 *
 * <code>map<string, .google.protobuf.Struct> document_data_map = 1;</code>
 */
@java.lang.Override
public com.google.protobuf.Struct getDocumentDataMapOrThrow(java.lang.String key) {
if (key == null) {
throw new NullPointerException("map key");
}
java.util.Map<java.lang.String, com.google.protobuf.StructOrBuilder> map =
internalGetMutableDocumentDataMap().ensureBuilderMap();
if (!map.containsKey(key)) {
throw new java.lang.IllegalArgumentException();
}
return documentDataMapConverter.build(map.get(key));
}
public Builder clearDocumentDataMap() {
bitField0_ = (bitField0_ & ~0x00000001);
internalGetMutableDocumentDataMap().clear();
return this;
}
/**
 *
 *
 * <pre>
 * Document data keyed by URI pattern. For example:
 * document_data_map = {
 * "www.url1.com/*": {
 * "Categories": ["category1", "category2"]
 * },
 * "www.url2.com/*": {
 * "Categories": ["category3"]
 * }
 * }
 * </pre>
 *
 * <code>map<string, .google.protobuf.Struct> document_data_map = 1;</code>
 */
public Builder removeDocumentDataMap(java.lang.String key) {
if (key == null) {
throw new NullPointerException("map key");
}
internalGetMutableDocumentDataMap().ensureBuilderMap().remove(key);
return this;
}
/** Use alternate mutation accessors instead. */
@java.lang.Deprecated
public java.util.Map<java.lang.String, com.google.protobuf.Struct> getMutableDocumentDataMap() {
bitField0_ |= 0x00000001;
return internalGetMutableDocumentDataMap().ensureMessageMap();
}
/**
 *
 *
 * <pre>
 * Document data keyed by URI pattern. For example:
 * document_data_map = {
 * "www.url1.com/*": {
 * "Categories": ["category1", "category2"]
 * },
 * "www.url2.com/*": {
 * "Categories": ["category3"]
 * }
 * }
 * </pre>
 *
 * <code>map<string, .google.protobuf.Struct> document_data_map = 1;</code>
 */
public Builder putDocumentDataMap(java.lang.String key, com.google.protobuf.Struct value) {
if (key == null) {
throw new NullPointerException("map key");
}
if (value == null) {
throw new NullPointerException("map value");
}
internalGetMutableDocumentDataMap().ensureBuilderMap().put(key, value);
bitField0_ |= 0x00000001;
return this;
}
/**
 *
 *
 * <pre>
 * Document data keyed by URI pattern. For example:
 * document_data_map = {
 * "www.url1.com/*": {
 * "Categories": ["category1", "category2"]
 * },
 * "www.url2.com/*": {
 * "Categories": ["category3"]
 * }
 * }
 * </pre>
 *
 * <code>map<string, .google.protobuf.Struct> document_data_map = 1;</code>
 */
public Builder putAllDocumentDataMap(
java.util.Map<java.lang.String, com.google.protobuf.Struct> values) {
for (java.util.Map.Entry<java.lang.String, com.google.protobuf.Struct> e :
values.entrySet()) {
if (e.getKey() == null || e.getValue() == null) {
throw new NullPointerException();
}
}
internalGetMutableDocumentDataMap().ensureBuilderMap().putAll(values);
bitField0_ |= 0x00000001;
return this;
}
/**
 *
 *
 * <pre>
 * Document data keyed by URI pattern. For example:
 * document_data_map = {
 * "www.url1.com/*": {
 * "Categories": ["category1", "category2"]
 * },
 * "www.url2.com/*": {
 * "Categories": ["category3"]
 * }
 * }
 * </pre>
 *
 * <code>map<string, .google.protobuf.Struct> document_data_map = 1;</code>
 */
public com.google.protobuf.Struct.Builder putDocumentDataMapBuilderIfAbsent(
java.lang.String key) {
java.util.Map<java.lang.String, com.google.protobuf.StructOrBuilder> builderMap =
internalGetMutableDocumentDataMap().ensureBuilderMap();
com.google.protobuf.StructOrBuilder entry = builderMap.get(key);
if (entry == null) {
entry = com.google.protobuf.Struct.newBuilder();
builderMap.put(key, entry);
}
if (entry instanceof com.google.protobuf.Struct) {
entry = ((com.google.protobuf.Struct) entry).toBuilder();
builderMap.put(key, entry);
}
return (com.google.protobuf.Struct.Builder) entry;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse)
// Shared immutable default (empty) instance of this message.
private static final com.google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE =
new com.google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse();
}
public static com.google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Singleton parser: delegates to Builder.mergeFrom and rewraps every failure
// as InvalidProtocolBufferException carrying the partially parsed message.
private static final com.google.protobuf.Parser<GetUriPatternDocumentDataResponse> PARSER =
new com.google.protobuf.AbstractParser<GetUriPatternDocumentDataResponse>() {
@java.lang.Override
public GetUriPatternDocumentDataResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
// Wrap plain IO failures so callers see a protobuf-specific exception.
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<GetUriPatternDocumentDataResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<GetUriPatternDocumentDataResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1alpha.GetUriPatternDocumentDataResponse
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// ---- dataset concatenation artifact: everything below this marker belongs to
// a separate source file, jdk/test/java/util/Locale/Bug7069824.java (openjdk/jdk8). ----
/*
* Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* @test
* @bug 7069824
* @summary Verify implementation for Locale matching.
* @run main Bug7069824
*/
import java.util.*;
import java.util.Locale.*;
import static java.util.Locale.FilteringMode.*;
import static java.util.Locale.LanguageRange.*;
public class Bug7069824 {
static boolean err = false;
/** Runs the LanguageRange tests then the Locale matching tests; throws if any check recorded a failure in {@code err}. */
public static void main(String[] args) {
testLanguageRange();
testLocale();
if (err) {
throw new RuntimeException("Failed.");
}
}
// Exercises the Locale.LanguageRange class: constants, constructors, methods.
private static void testLanguageRange() {
System.out.println("Test LanguageRange class...");
testConstants();
testConstructors();
testMethods();
}
// Exercises the Locale matching API: filter, filterTags, lookup, lookupTag.
private static void testLocale() {
System.out.println("Test Locale class...");
test_filter();
test_filterTags();
test_lookup();
test_lookupTag();
}
/** Verifies the documented weight bounds MIN_WEIGHT (0.0) and MAX_WEIGHT (1.0). */
private static void testConstants() {
    boolean failed = false;
    if (MIN_WEIGHT != 0.0) {
        failed = true;
        System.err.println(" MIN_WEIGHT should be 0.0 but got "
            + MIN_WEIGHT);
    }
    if (MAX_WEIGHT != 1.0) {
        failed = true;
        System.err.println(" MAX_WEIGHT should be 1.0 but got "
            + MAX_WEIGHT);
    }
    if (failed) {
        err = true;
        System.err.println(" testConstants() failed.");
    } else {
        System.out.println(" testConstants() passed.");
    }
}
/**
 * Verifies LanguageRange construction: null ranges throw NPE, well-formed
 * ranges (including private-use-like "elvish") are accepted, out-of-range
 * weights and syntactically ill-formed ranges throw IAE.
 */
private static void testConstructors() {
    boolean error = false;
    LanguageRange lr;

    // A null range must be rejected with NPE by both constructors.
    String range = null;
    try {
        lr = new LanguageRange(range);
        error = true;
        System.err.println(" NPE should be thrown for LanguageRange("
            + range + ").");
    } catch (NullPointerException expected) {
    }
    double weight = 0.8;
    try {
        lr = new LanguageRange(range, weight);
        error = true;
        System.err.println(" NPE should be thrown for LanguageRange("
            + range + ", " + weight + ").");
    } catch (NullPointerException expected) {
    }

    // Well-formed ranges must be accepted without exception.
    for (String r : new String[] {"elvish", "de-DE"}) {
        try {
            lr = new LanguageRange(r);
        } catch (Exception ex) {
            error = true;
            System.err.println(" " + ex
                + " should not be thrown for LanguageRange(" + r + ").");
        }
    }
    range = "ar";
    weight = 0.8;
    try {
        lr = new LanguageRange(range, weight);
    } catch (Exception ex) {
        error = true;
        System.err.println(" " + ex
            + " should not be thrown for LanguageRange(" + range + ", "
            + weight + ").");
    }

    // Weights outside [MIN_WEIGHT, MAX_WEIGHT] must trigger IAE.
    String[] badWeightRanges = {"ja", "Elvish"};
    double[] badWeights = {-0.8, 3.0};
    for (int i = 0; i < badWeightRanges.length; i++) {
        try {
            lr = new LanguageRange(badWeightRanges[i], badWeights[i]);
            error = true;
            System.err.println(" IAE should be thrown for LanguageRange("
                + badWeightRanges[i] + ", " + badWeights[i] + ").");
        } catch (IllegalArgumentException expected) {
        }
    }

    // Syntactically ill-formed ranges must trigger IAE.
    String[] illformedRanges = {"-ja", "ja--JP", "en-US-", "a4r", "ar*",
        "ar-*EG", "", "abcdefghijklmn", "ja-J=", "ja-opqrstuvwxyz"};
    for (String r : illformedRanges) {
        try {
            lr = new LanguageRange(r);
            error = true;
            System.err.println(" IAE should be thrown for LanguageRange("
                + r + ").");
        } catch (IllegalArgumentException expected) {
        }
    }

    if (error) {
        err = true;
        System.err.println(" testConstructors() failed.");
    } else {
        System.out.println(" testConstructors() passed.");
    }
}
// Drives the per-method LanguageRange tests.
private static void testMethods() {
test_getRange();
test_getWeight();
test_equals();
test_parse();
test_mapEquivalents();
}
/** Checks getRange(): lowercase input is returned as-is, mixed case is normalized to lower case. */
private static void test_getRange() {
    boolean failed = false;

    // Already-lowercase range comes back unchanged.
    String range = "ja";
    double weight = 0.5;
    LanguageRange lr = new LanguageRange(range, weight);
    if (!lr.getRange().equals(range)) {
        failed = true;
        System.err.println(" LanguageRange.getRange() returned unexpected value. Expected: "
            + range + ", got: " + lr.getRange());
    }

    // Mixed-case input is normalized to lower case by the constructor.
    range = "en-US";
    weight = 0.5;
    lr = new LanguageRange(range, weight);
    if (!lr.getRange().equals(range.toLowerCase())) {
        failed = true;
        System.err.println(" LanguageRange.getRange() returned unexpected value. Expected: "
            + range + ", got: " + lr.getRange());
    }

    if (failed) {
        err = true;
        System.err.println(" test_getRange() failed.");
    } else {
        System.out.println(" test_getRange() passed.");
    }
}
/** Checks getWeight(): explicit weights are echoed back; the one-arg constructor defaults to MAX_WEIGHT. */
private static void test_getWeight() {
    boolean failed = false;

    // Explicitly supplied weight must be returned unchanged.
    String range = "ja";
    double weight = 0.5;
    LanguageRange lr = new LanguageRange(range, weight);
    if (lr.getWeight() != weight) {
        failed = true;
        System.err.println(" LanguageRange.getWeight() returned unexpected value. Expected: "
            + weight + ", got: " + lr.getWeight());
    }

    // The single-argument constructor must default the weight to MAX_WEIGHT.
    range = "ja";
    weight = MAX_WEIGHT; // default
    lr = new LanguageRange(range);
    if (!lr.getRange().equals(range) || lr.getWeight() != MAX_WEIGHT) {
        failed = true;
        System.err.println(" LanguageRange.getWeight() returned unexpected value. Expected: "
            + weight + ", got: " + lr.getWeight());
    }

    if (failed) {
        err = true;
        System.err.println(" test_getWeight() failed.");
    } else {
        System.out.println(" test_getWeight() passed.");
    }
}
/**
 * Checks equals(): same range and weight compare equal (one-arg constructor
 * defaults to 1.0); differing weight, differing range, null, and a foreign
 * type all compare unequal.
 */
private static void test_equals() {
    boolean failed = false;
    LanguageRange lr1 = new LanguageRange("ja", 1.0);
    LanguageRange lr2 = new LanguageRange("ja");      // weight defaults to 1.0
    LanguageRange lr3 = new LanguageRange("ja", 0.1);
    LanguageRange lr4 = new LanguageRange("en", 1.0);
    if (!lr1.equals(lr2)) {
        failed = true;
        System.err.println(" LanguageRange(LR(ja, 1.0)).equals(LR(ja)) should return true.");
    }
    if (lr1.equals(lr3)) {
        failed = true;
        System.err.println(" LanguageRange(LR(ja, 1.0)).equals(LR(ja, 0.1)) should return false.");
    }
    if (lr1.equals(lr4)) {
        failed = true;
        System.err.println(" LanguageRange(LR(ja, 1.0)).equals(LR(en, 1.0)) should return false.");
    }
    if (lr1.equals(null)) {
        failed = true;
        System.err.println(" LanguageRange(LR(ja, 1.0)).equals(null) should return false.");
    }
    if (lr1.equals("")) {
        failed = true;
        System.err.println(" LanguageRange(LR(ja, 1.0)).equals(\"\") should return false.");
    }
    if (failed) {
        err = true;
        System.err.println(" test_equals() failed.");
    } else {
        System.out.println(" test_equals() passed.");
    }
}
/**
 * Tests LanguageRange.parse(): null input throws NPE, empty input and an
 * out-of-range q-value throw IAE, and well-formed Accept-Language strings
 * expand equivalent/deprecated subtags (e.g. iw/he, in/id, no-bok/nb,
 * fr-FX/fr-FR, hak/zh-hakka) and order the result by descending weight.
 */
private static void test_parse() {
boolean error = false;
List<LanguageRange> list;
String str = null;
try {
list = LanguageRange.parse(str);
error = true;
System.err.println(" NPE should be thrown for parse("
+ str + ").");
}
catch (NullPointerException ex) {
}
str = "";
try {
list = LanguageRange.parse("");
error = true;
System.err.println(" IAE should be thrown for parse("
+ str + ").");
}
catch (IllegalArgumentException ex) {
}
// Weights (q-values) must lie within [0.0, 1.0]; q=3 is out of range.
str = "ja;q=3";
try {
list = LanguageRange.parse(str);
error = true;
System.err.println("IAE should be thrown for parse("
+ str + ").");
}
catch (IllegalArgumentException ex) {
}
// Expected list includes equivalents (fr-FX<->fr-FR, en-FR<->en-FX,
// de-DE<->de-DD, in<->id, iw<->he, tp<->tl) sorted by descending weight.
str = "Accept-Language: fr-FX,de-DE;q=0.5, fr-tp-x-FOO;q=0.1,"
+ "en-X-tp;q=0.6,en-FR;q=.7,de-de;q=0.8, iw;q=0.4, "
+ "he;q=0.4, de-de;q=0.5,ja, in-tpp, in-tp;q=0.2";
ArrayList<LanguageRange> expected = new ArrayList<>();
expected.add(new LanguageRange("fr-fx", 1.0));
expected.add(new LanguageRange("fr-fr", 1.0));
expected.add(new LanguageRange("ja", 1.0));
expected.add(new LanguageRange("in-tpp", 1.0));
expected.add(new LanguageRange("id-tpp", 1.0));
expected.add(new LanguageRange("en-fr", 0.7));
expected.add(new LanguageRange("en-fx", 0.7));
expected.add(new LanguageRange("en-x-tp", 0.6));
expected.add(new LanguageRange("de-de", 0.5));
expected.add(new LanguageRange("de-dd", 0.5));
expected.add(new LanguageRange("iw", 0.4));
expected.add(new LanguageRange("he", 0.4));
expected.add(new LanguageRange("in-tp", 0.2));
expected.add(new LanguageRange("id-tl", 0.2));
expected.add(new LanguageRange("id-tp", 0.2));
expected.add(new LanguageRange("in-tl", 0.2));
expected.add(new LanguageRange("fr-tp-x-foo", 0.1));
expected.add(new LanguageRange("fr-tl-x-foo", 0.1));
List<LanguageRange> got = LanguageRange.parse(str);
if (!areEqual(expected, got)) {
error = true;
System.err.println(" #1 parse() test failed.");
}
// Grandfathered/extlang expansions: no-nyn->nn, no-bok->nb, hak and cmn
// gain their zh-* equivalents.
str = "Accept-Language: hak-CN;q=0.8, no-bok-NO;q=0.9, no-nyn, cmn-CN;q=0.1";
expected = new ArrayList<>();
expected.add(new LanguageRange("no-nyn", 1.0));
expected.add(new LanguageRange("nn", 1.0));
expected.add(new LanguageRange("no-bok-no", 0.9));
expected.add(new LanguageRange("nb-no", 0.9));
expected.add(new LanguageRange("hak-CN", 0.8));
expected.add(new LanguageRange("zh-hakka-CN", 0.8));
expected.add(new LanguageRange("i-hak-CN", 0.8));
expected.add(new LanguageRange("cmn-CN", 0.1));
expected.add(new LanguageRange("zh-cmn-CN", 0.1));
expected.add(new LanguageRange("zh-guoyu-CN", 0.1));
got = LanguageRange.parse(str);
if (!areEqual(expected, got)) {
error = true;
System.err.println(" #2 parse() test failed.");
}
// rki/ybd/ccq are mutual equivalents; the highest weight (0.4) wins and
// ccq's own 0.5 entry is dropped as a duplicate.
str = "Accept-Language: rki;q=0.4, no-bok-NO;q=0.9, ccq;q=0.5";
expected = new ArrayList<>();
expected.add(new LanguageRange("no-bok-no", 0.9));
expected.add(new LanguageRange("nb-no", 0.9));
expected.add(new LanguageRange("rki", 0.4));
expected.add(new LanguageRange("ybd", 0.4));
expected.add(new LanguageRange("ccq", 0.4));
got = LanguageRange.parse(str);
if (!areEqual(expected, got)) {
error = true;
System.err.println(" #3 parse() test failed.");
}
if (error) {
err = true;
System.err.println(" test_parse() failed.");
} else {
System.out.println(" test_parse() passed.");
}
}
/**
 * Compares two LanguageRange lists element-by-element (range and weight, in
 * order) and dumps a diagnostic on any mismatch.
 *
 * @param expected the expected priority list
 * @param got the list actually produced by the API under test
 * @return true if both lists have identical contents in the same order
 */
private static boolean areEqual(List<LanguageRange> expected,
List<LanguageRange> got) {
boolean error = false;
int expectedSize = expected.size();
int actualSize = got.size();
if (expectedSize != actualSize) {
error = true;
System.err.println(" Expected size=" + expectedSize);
for (LanguageRange lr : expected) {
System.err.println(" range=" + lr.getRange()
+ ", weight=" + lr.getWeight());
}
// Fix: this line previously printed to System.out while every other
// diagnostic in this method goes to System.err, splitting the dump
// across two streams; keep the whole diagnostic on stderr.
System.err.println(" Actual size=" + actualSize);
for (LanguageRange lr : got) {
System.err.println(" range=" + lr.getRange()
+ ", weight=" + lr.getWeight());
}
} else {
// Sizes match: compare pairwise; report every differing index.
for (int i = 0; i < expectedSize; i++) {
LanguageRange lr1 = expected.get(i);
LanguageRange lr2 = got.get(i);
if (!lr1.getRange().equals(lr2.getRange())
|| lr1.getWeight() != lr2.getWeight()) {
error = true;
System.err.println(" " + i + ": Expected: range=" + lr1.getRange()
+ ", weight=" + lr1.getWeight());
System.err.println(" " + i + ": Actual: range=" + lr2.getRange()
+ ", weight=" + lr2.getWeight());
}
}
}
return !error;
}
/**
 * Tests LanguageRange.mapEquivalents(): a null or empty map is tolerated, a
 * null priority list throws NPE, and a populated map rewrites matching
 * ranges (a null or empty equivalent list drops the range; otherwise each
 * equivalent replaces the matched prefix, preserving weights).
 */
private static void test_mapEquivalents() {
boolean error = false;
String ranges = "zh, zh-TW;q=0.8, ar;q=0.9, EN, zh-HK, ja-JP;q=0.2, es;q=0.4";
List<LanguageRange> priorityList = LanguageRange.parse(ranges);
HashMap<String, List<String>> map = null;
try {
List<LanguageRange> list =
LanguageRange.mapEquivalents(priorityList, null);
}
catch (Exception ex) {
error = true;
System.err.println(ex
+ " should not be thrown for mapEquivalents(priorityList, null).");
}
map = new HashMap<>();
try {
List<LanguageRange> list =
LanguageRange.mapEquivalents(priorityList, map);
}
catch (Exception ex) {
error = true;
System.err.println(ex
+ " should not be thrown for mapEquivalents(priorityList, empty map).");
}
ArrayList<String> equivalentList = new ArrayList<>();
equivalentList.add("ja");
equivalentList.add("ja-Hira");
map.put("ja", equivalentList);
try {
List<LanguageRange> list = LanguageRange.mapEquivalents(null, map);
error = true;
System.err.println("NPE should be thrown for mapEquivalents(null, map).");
}
catch (NullPointerException ex) {
}
// Build a map covering all interesting cases: multiple equivalents (ja,
// zh), self-plus-extra (zh-TW), null list (es: range removed), empty list
// (en: range removed), and replacement by an unrelated tag (zh-HK -> de).
map = new LinkedHashMap<>();
ArrayList<String> equivalentList1 = new ArrayList<>();
equivalentList1.add("ja");
equivalentList1.add("ja-Hira");
map.put("ja", equivalentList1);
ArrayList<String> equivalentList2 = new ArrayList<>();
equivalentList2.add("zh-Hans");
equivalentList2.add("zh-Hans-CN");
equivalentList2.add("zh-CN");
map.put("zh", equivalentList2);
ArrayList<String> equivalentList3 = new ArrayList<>();
equivalentList3.add("zh-TW");
equivalentList3.add("zh-Hant");
map.put("zh-TW", equivalentList3);
map.put("es", null);
ArrayList<String> equivalentList4 = new ArrayList<>();
map.put("en", equivalentList4);
ArrayList<String> equivalentList5 = new ArrayList<>();
equivalentList5.add("de");
map.put("zh-HK", equivalentList5);
ArrayList<LanguageRange> expected = new ArrayList<>();
expected.add(new LanguageRange("zh-hans", 1.0));
expected.add(new LanguageRange("zh-hans-cn", 1.0));
expected.add(new LanguageRange("zh-cn", 1.0));
expected.add(new LanguageRange("de", 1.0));
expected.add(new LanguageRange("ar", 0.9));
expected.add(new LanguageRange("zh-tw", 0.8));
expected.add(new LanguageRange("zh-hant", 0.8));
expected.add(new LanguageRange("ja-jp", 0.2));
expected.add(new LanguageRange("ja-hira-jp", 0.2));
List<LanguageRange> got =
LanguageRange.mapEquivalents(priorityList, map);
if (!areEqual(expected, got)) {
error = true;
}
if (error) {
err = true;
System.err.println(" test_mapEquivalents() failed.");
} else {
System.out.println(" test_mapEquivalents() passed.");
}
}
    /**
     * Exercises {@code Locale.filter(priorityList, tags, mode)} across every
     * {@code FilteringMode}, plus its exception contract (IAE on an extended
     * range under REJECT_EXTENDED_RANGES, NPE on null arguments) and the
     * empty-tag-list edge case. Failures set the class-level {@code err} flag.
     */
    private static void test_filter() {
        boolean error = false;
        // #1: basic ranges, extended filtering matches script-variant tags.
        String ranges = "ja-JP, fr-FR";
        String tags = "de-DE, en, ja-JP-hepburn, fr, he, ja-Latn-JP";
        FilteringMode mode = EXTENDED_FILTERING;
        List<LanguageRange> priorityList = LanguageRange.parse(ranges);
        List<Locale> tagList = generateLocales(tags);
        String actualLocales =
            showLocales(Locale.filter(priorityList, tagList, mode));
        String expectedLocales = "ja-JP-hepburn, ja-Latn-JP";
        if (!expectedLocales.equals(actualLocales)) {
            error = true;
            showErrorMessage("#1 filter(" + mode + ")",
                ranges, tags, expectedLocales, actualLocales);
        }
        // #2: explicit extended range ("ja-*-JP") under extended filtering.
        ranges = "ja-*-JP, fr-FR";
        tags = "de-DE, en, ja-JP-hepburn, fr, he, ja-Latn-JP";
        mode = EXTENDED_FILTERING;
        priorityList = LanguageRange.parse(ranges);
        tagList = generateLocales(tags);
        actualLocales = showLocales(Locale.filter(priorityList, tagList, mode));
        expectedLocales = "ja-JP-hepburn, ja-Latn-JP";
        if (!expectedLocales.equals(actualLocales)) {
            error = true;
            showErrorMessage("#2 filter(" + mode + ")",
                ranges, tags, expectedLocales, actualLocales);
        }
        // #3: AUTOSELECT with an extended range present should behave like
        // extended filtering.
        ranges = "ja-*-JP, fr-FR, de-de;q=0.2";
        tags = "de-DE, en, ja-JP-hepburn, de-de, fr, he, ja-Latn-JP";
        mode = AUTOSELECT_FILTERING;
        priorityList = LanguageRange.parse(ranges);
        tagList = generateLocales(tags);
        actualLocales = showLocales(Locale.filter(priorityList, tagList, mode));
        expectedLocales = "ja-JP-hepburn, ja-Latn-JP, de-DE";
        if (!expectedLocales.equals(actualLocales)) {
            error = true;
            showErrorMessage("#3 filter(" + mode + ")",
                ranges, tags, expectedLocales, actualLocales);
        }
        // #4: AUTOSELECT with only basic ranges should behave like basic
        // filtering (ja-Latn-JP no longer matches).
        ranges = "ja-JP, fr-FR, de-de;q=0.2";
        tags = "de-DE, en, ja-JP-hepburn, de-de, fr, he, ja-Latn-JP";
        mode = AUTOSELECT_FILTERING;
        priorityList = LanguageRange.parse(ranges);
        tagList = generateLocales(tags);
        actualLocales = showLocales(Locale.filter(priorityList, tagList, mode));
        expectedLocales = "ja-JP-hepburn, de-DE";
        if (!expectedLocales.equals(actualLocales)) {
            error = true;
            showErrorMessage("#4 filter(" + mode + ")",
                ranges, tags, expectedLocales, actualLocales);
        }
        // #5: IGNORE_EXTENDED_RANGES drops "ja-*-JP" entirely; only "en"
        // survives.
        ranges = "en;q=0.2, ja-*-JP, fr-JP";
        tags = "de-DE, en, ja-JP-hepburn, fr, he, ja-Latn-JP";
        mode = IGNORE_EXTENDED_RANGES;
        priorityList = LanguageRange.parse(ranges);
        tagList = generateLocales(tags);
        actualLocales = showLocales(Locale.filter(priorityList, tagList, mode));
        expectedLocales = "en";
        if (!expectedLocales.equals(actualLocales)) {
            error = true;
            showErrorMessage("#5 filter(" + mode + ")",
                ranges, tags, expectedLocales, actualLocales);
        }
        // #6: MAP_EXTENDED_RANGES rewrites "ja-*-JP" to "ja-JP".
        ranges = "en;q=0.2, ja-*-JP, fr-JP";
        tags = "de-DE, en, ja-JP-hepburn, fr, he, ja-Latn-JP";
        mode = MAP_EXTENDED_RANGES;
        priorityList = LanguageRange.parse(ranges);
        tagList = generateLocales(tags);
        actualLocales = showLocales(Locale.filter(priorityList, tagList, mode));
        expectedLocales = "ja-JP-hepburn, en";
        if (!expectedLocales.equals(actualLocales)) {
            error = true;
            showErrorMessage("#6 filter(" + mode + ")",
                ranges, tags, expectedLocales, actualLocales);
        }
        // #7: REJECT_EXTENDED_RANGES is fine with basic-only ranges.
        ranges = "en;q=0.2, ja-JP, fr-JP";
        tags = "de-DE, en, ja-JP-hepburn, fr, he, ja-Latn-JP";
        mode = REJECT_EXTENDED_RANGES;
        priorityList = LanguageRange.parse(ranges);
        tagList = generateLocales(tags);
        actualLocales = showLocales(Locale.filter(priorityList, tagList, mode));
        expectedLocales = "ja-JP-hepburn, en";
        if (!expectedLocales.equals(actualLocales)) {
            error = true;
            showErrorMessage("#7 filter(" + mode + ")",
                ranges, tags, expectedLocales, actualLocales);
        }
        // An extended range under REJECT_EXTENDED_RANGES must raise IAE.
        ranges = "en;q=0.2, ja-*-JP, fr-JP";
        tags = "de-DE, en, ja-JP-hepburn, fr, he, ja-Latn-JP";
        mode = REJECT_EXTENDED_RANGES;
        priorityList = LanguageRange.parse(ranges);
        tagList = generateLocales(tags);
        try {
            actualLocales =
                showLocales(Locale.filter(priorityList, tagList, mode));
            error = true;
            System.out.println("IAE should be thrown for filter("
                + mode + ").");
        }
        catch (IllegalArgumentException ex) {
            // expected
        }
        // Null tag list must raise NPE.
        ranges = "en;q=0.2, ja-*-JP, fr-JP";
        tags = null;
        mode = REJECT_EXTENDED_RANGES;
        priorityList = LanguageRange.parse(ranges);
        tagList = generateLocales(tags);
        try {
            actualLocales =
                showLocales(Locale.filter(priorityList, tagList, mode));
            error = true;
            System.out.println("NPE should be thrown for filter(tags=null).");
        }
        catch (NullPointerException ex) {
            // expected
        }
        // Null ranges must raise NPE (from parse or filter).
        ranges = null;
        tags = "de-DE, en, ja-JP-hepburn, fr, he, ja-Latn-JP";
        mode = REJECT_EXTENDED_RANGES;
        try {
            priorityList = LanguageRange.parse(ranges);
            tagList = generateLocales(tags);
            actualLocales =
                showLocales(Locale.filter(priorityList, tagList, mode));
            error = true;
            System.out.println("NPE should be thrown for filter(ranges=null).");
        }
        catch (NullPointerException ex) {
            // expected
        }
        // An empty (non-null) tag list must not throw.
        ranges = "en;q=0.2, ja-*-JP, fr-JP";
        tags = "";
        mode = REJECT_EXTENDED_RANGES;
        priorityList = LanguageRange.parse(ranges);
        tagList = generateLocales(tags);
        try {
            actualLocales =
                showLocales(Locale.filter(priorityList, tagList, mode));
        }
        catch (Exception ex) {
            error = true;
            System.out.println(ex
                + " should not be thrown for filter(" + ranges + ", \"\").");
        }
        if (error) {
            err = true;
            System.out.println(" test_filter() failed.");
        } else {
            System.out.println(" test_filter() passed.");
        }
    }
    /**
     * Exercises {@code Locale.filterTags(...)}: wildcard ranges, the default
     * (two-argument) overload, and the MAP/EXTENDED filtering modes against
     * RFC 4647 example data. Failures set the class-level {@code err} flag.
     */
    private static void test_filterTags() {
        boolean error = false;
        // #1: "*" wildcard keeps every tag, in order.
        String ranges = "en;q=0.2, *;q=0.6, ja";
        String tags = "de-DE, en, ja-JP-hepburn, fr-JP, he";
        List<LanguageRange> priorityList = LanguageRange.parse(ranges);
        List<String> tagList = generateLanguageTags(tags);
        String actualTags =
            showLanguageTags(Locale.filterTags(priorityList, tagList));
        String expectedTags = tags;
        if (!expectedTags.equals(actualTags)) {
            error = true;
            showErrorMessage("#1 filterTags()",
                ranges, tags, expectedTags, actualTags);
        }
        // #2: results come back lower-cased, ordered by range weight.
        ranges = "en;q=0.2, ja-JP, fr-JP";
        tags = "de-DE, en, ja-JP-hepburn, fr, he";
        priorityList = LanguageRange.parse(ranges);
        tagList = generateLanguageTags(tags);
        actualTags = showLanguageTags(Locale.filterTags(priorityList, tagList));
        expectedTags = "ja-jp-hepburn, en";
        if (!expectedTags.equals(actualTags)) {
            error = true;
            showErrorMessage("#2 filterTags()",
                ranges, tags, expectedTags, actualTags);
        }
        // #3: basic range + MAP_EXTENDED_RANGES -> basic matching only.
        ranges = "de-DE";
        tags = "de-DE, de-de, de-Latn-DE, de-Latf-DE, de-DE-x-goethe, "
            + "de-Latn-DE-1996, de-Deva-DE, de, de-x-DE, de-Deva";
        FilteringMode mode = MAP_EXTENDED_RANGES;
        priorityList = LanguageRange.parse(ranges);
        tagList = generateLanguageTags(tags);
        actualTags = showLanguageTags(Locale.filterTags(priorityList, tagList, mode));
        expectedTags = "de-de, de-de-x-goethe";
        if (!expectedTags.equals(actualTags)) {
            error = true;
            showErrorMessage("#3 filterTags(" + mode + ")",
                ranges, tags, expectedTags, actualTags);
        }
        // #4: same data under EXTENDED_FILTERING also matches script variants.
        ranges = "de-DE";
        tags = "de-DE, de-de, de-Latn-DE, de-Latf-DE, de-DE-x-goethe, "
            + "de-Latn-DE-1996, de-Deva-DE, de, de-x-DE, de-Deva";
        mode = EXTENDED_FILTERING;
        priorityList = LanguageRange.parse(ranges);
        tagList = generateLanguageTags(tags);
        actualTags = showLanguageTags(Locale.filterTags(priorityList, tagList, mode));
        expectedTags = "de-de, de-latn-de, de-latf-de, de-de-x-goethe, "
            + "de-latn-de-1996, de-deva-de";
        if (!expectedTags.equals(actualTags)) {
            error = true;
            showErrorMessage("#4 filterTags(" + mode + ")",
                ranges, tags, expectedTags, actualTags);
        }
        // #5: "de-*-DE" is equivalent to "de-DE" under extended filtering.
        ranges = "de-*-DE";
        tags = "de-DE, de-de, de-Latn-DE, de-Latf-DE, de-DE-x-goethe, "
            + "de-Latn-DE-1996, de-Deva-DE, de, de-x-DE, de-Deva";
        mode = EXTENDED_FILTERING;
        priorityList = LanguageRange.parse(ranges);
        tagList = generateLanguageTags(tags);
        actualTags = showLanguageTags(Locale.filterTags(priorityList, tagList, mode));
        expectedTags = "de-de, de-latn-de, de-latf-de, de-de-x-goethe, "
            + "de-latn-de-1996, de-deva-de";
        if (!expectedTags.equals(actualTags)) {
            error = true;
            showErrorMessage("#5 filterTags(" + mode + ")",
                ranges, tags, expectedTags, actualTags);
        }
        if (error) {
            err = true;
            System.out.println(" test_filterTags() failed.");
        } else {
            System.out.println(" test_filterTags() passed.");
        }
    }
    /**
     * Exercises {@code Locale.lookup(priorityList, locales)}: wildcard
     * ranges, legacy-code equivalence (iw/he), and extended ranges with
     * private-use subtags. Failures set the class-level {@code err} flag.
     */
    private static void test_lookup() {
        boolean error = false;
        // #1: "iw" must match "he" (legacy Hebrew code equivalence).
        String ranges = "en;q=0.2, *-JP;q=0.6, iw";
        String tags = "de-DE, en, ja-JP-hepburn, fr-JP, he";
        List<LanguageRange> priorityList = LanguageRange.parse(ranges);
        List<Locale> localeList = generateLocales(tags);
        String actualLocale =
            Locale.lookup(priorityList, localeList).toLanguageTag();
        String expectedLocale ="he";
        if (!expectedLocale.equals(actualLocale)) {
            error = true;
            showErrorMessage("#1 lookup()", ranges, tags, expectedLocale, actualLocale);
        }
        // #2: candidate list already containing both "he-IL" and "iw".
        ranges = "en;q=0.2, *-JP;q=0.6, iw";
        tags = "de-DE, he-IL, en, iw";
        priorityList = LanguageRange.parse(ranges);
        localeList = generateLocales(tags);
        actualLocale = Locale.lookup(priorityList, localeList).toLanguageTag();
        expectedLocale = "he";
        if (!expectedLocale.equals(actualLocale)) {
            error = true;
            showErrorMessage("#2 lookup()", ranges, tags, expectedLocale, actualLocale);
        }
        // #3: extended range with private-use subtag truncates to ja-Latn-JP.
        ranges = "en;q=0.2, ja-*-JP-x-foo;q=0.6, iw";
        tags = "de-DE, fr, en, ja-Latn-JP";
        priorityList = LanguageRange.parse(ranges);
        localeList = generateLocales(tags);
        actualLocale = Locale.lookup(priorityList, localeList).toLanguageTag();
        expectedLocale = "ja-Latn-JP";
        if (!expectedLocale.equals(actualLocale)) {
            error = true;
            showErrorMessage("#3 lookup()", ranges, tags, expectedLocale, actualLocale);
        }
        if (error) {
            err = true;
            System.out.println(" test_lookup() failed.");
        } else {
            System.out.println(" test_lookup() passed.");
        }
    }
    /**
     * Exercises {@code Locale.lookupTag(priorityList, tags)}: the "*" range
     * (which cannot select a tag in lookup), wildcard prefix matching, and
     * legacy-code equivalence in both directions (iw&lt;-&gt;he).
     * Failures set the class-level {@code err} flag.
     */
    private static void test_lookupTag() {
        boolean error = false;
        // #1: "*" matches nothing in lookup; result must be null.
        String ranges = "en, *";
        String tags = "es, de, ja-JP";
        List<LanguageRange> priorityList = LanguageRange.parse(ranges);
        List<String> tagList = generateLanguageTags(tags);
        String actualTag = Locale.lookupTag(priorityList, tagList);
        String expectedTag = null;
        if (actualTag != null) {
            error = true;
            showErrorMessage("#1 lookupTag()", ranges, tags, expectedTag, actualTag);
        }
        // #2: "*-JP" picks the first *-JP tag; result is lower-cased.
        ranges= "en;q=0.2, *-JP";
        tags = "de-DE, en, ja-JP-hepburn, fr-JP, en-JP";
        priorityList = LanguageRange.parse(ranges);
        tagList = generateLanguageTags(tags);
        actualTag = Locale.lookupTag(priorityList, tagList);
        expectedTag = "fr-jp";
        if (!expectedTag.equals(actualTag)) {
            error = true;
            showErrorMessage("#2 lookupTag()", ranges, tags, expectedTag, actualTag);
        }
        // #3: range "iw" must match candidate tag "he".
        ranges = "en;q=0.2, ar-MO, iw";
        tags = "de-DE, he, fr-JP";
        priorityList = LanguageRange.parse(ranges);
        tagList = generateLanguageTags(tags);
        actualTag = Locale.lookupTag(priorityList, tagList);
        expectedTag = "he";
        if (!expectedTag.equals(actualTag)) {
            error = true;
            showErrorMessage("#3 lookupTag()", ranges, tags, expectedTag, actualTag);
        }
        // #4: range "he" must match candidate tag "iw".
        ranges = "en;q=0.2, ar-MO, he";
        tags = "de-DE, iw, fr-JP";
        priorityList = LanguageRange.parse(ranges);
        tagList = generateLanguageTags(tags);
        actualTag = Locale.lookupTag(priorityList, tagList);
        expectedTag = "iw";
        if (!expectedTag.equals(actualTag)) {
            error = true;
            showErrorMessage("#4 lookupTag()", ranges, tags, expectedTag, actualTag);
        }
        if (error) {
            err = true;
            System.out.println(" test_lookupTag() failed.");
        } else {
            System.out.println(" test_lookupTag() passed.");
        }
    }
private static List<Locale> generateLocales(String tags) {
if (tags == null) {
return null;
}
List<Locale> localeList = new ArrayList<>();
if (tags.equals("")) {
return localeList;
}
String[] t = tags.split(", ");
for (String tag : t) {
localeList.add(Locale.forLanguageTag(tag));
}
return localeList;
}
private static List<String> generateLanguageTags(String tags) {
List<String> tagList = new ArrayList<>();
String[] t = tags.split(", ");
for (String tag : t) {
tagList.add(tag);
}
return tagList;
}
private static String showPriorityList(List<LanguageRange> priorityList) {
StringBuilder sb = new StringBuilder();
Iterator<LanguageRange> itr = priorityList.iterator();
LanguageRange lr;
if (itr.hasNext()) {
lr = itr.next();
sb.append(lr.getRange());
sb.append(";q=");
sb.append(lr.getWeight());
}
while (itr.hasNext()) {
sb.append(", ");
lr = itr.next();
sb.append(lr.getRange());
sb.append(";q=");
sb.append(lr.getWeight());
}
return sb.toString();
}
private static String showLanguageTags(List<String> tags) {
StringBuilder sb = new StringBuilder();
Iterator<String> itr = tags.iterator();
if (itr.hasNext()) {
sb.append(itr.next());
}
while (itr.hasNext()) {
sb.append(", ");
sb.append(itr.next());
}
return sb.toString().trim();
}
private static String showLocales(List<Locale> locales) {
StringBuilder sb = new StringBuilder();
Iterator<Locale> itr = locales.iterator();
if (itr.hasNext()) {
sb.append(itr.next().toLanguageTag());
}
while (itr.hasNext()) {
sb.append(", ");
sb.append(itr.next().toLanguageTag());
}
return sb.toString().trim();
}
private static void showErrorMessage(String methodName,
String priorityList,
String tags,
String expectedTags,
String actualTags) {
System.out.println("\nIncorrect " + methodName + " result.");
System.out.println(" Priority list : " + priorityList);
System.out.println(" Language tags : " + tags);
System.out.println(" Expected value : " + expectedTags);
System.out.println(" Actual value : " + actualTags);
}
}
|
apache/hadoop-common | 34,940 | hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/snative/SwiftNativeFileSystemStore.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs.swift.snative;
import org.apache.commons.httpclient.Header;
import org.apache.commons.httpclient.HttpStatus;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileAlreadyExistsException;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.swift.exceptions.SwiftConfigurationException;
import org.apache.hadoop.fs.swift.exceptions.SwiftException;
import org.apache.hadoop.fs.swift.exceptions.SwiftInvalidResponseException;
import org.apache.hadoop.fs.swift.exceptions.SwiftOperationFailedException;
import org.apache.hadoop.fs.swift.http.HttpBodyContent;
import org.apache.hadoop.fs.swift.http.SwiftProtocolConstants;
import org.apache.hadoop.fs.swift.http.SwiftRestClient;
import org.apache.hadoop.fs.swift.util.DurationStats;
import org.apache.hadoop.fs.swift.util.JSONUtil;
import org.apache.hadoop.fs.swift.util.SwiftObjectPath;
import org.apache.hadoop.fs.swift.util.SwiftUtils;
import org.codehaus.jackson.map.type.CollectionType;
import java.io.ByteArrayInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.InterruptedIOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* File system store implementation.
* Makes REST requests, parses data from responses
*/
public class SwiftNativeFileSystemStore {
private static final Pattern URI_PATTERN = Pattern.compile("\"\\S+?\"");
private static final String PATTERN = "EEE, d MMM yyyy hh:mm:ss zzz";
private static final Log LOG =
LogFactory.getLog(SwiftNativeFileSystemStore.class);
private URI uri;
private SwiftRestClient swiftRestClient;
  /**
   * Initialize the filesystem store -this creates the REST client binding.
   *
   * @param fsURI URI of the filesystem, which is used to map to the
   *        filesystem-specific options in the configuration file
   * @param configuration configuration
   * @throws IOException on any failure.
   */
  public void initialize(URI fsURI, Configuration configuration) throws IOException {
    this.uri = fsURI;
    // All HTTP interaction goes through this client instance.
    this.swiftRestClient = SwiftRestClient.getInstance(fsURI, configuration);
  }
@Override
public String toString() {
return "SwiftNativeFileSystemStore with "
+ swiftRestClient;
}
/**
* Get the default blocksize of this (bound) filesystem
* @return the blocksize returned for all FileStatus queries,
* which is used by the MapReduce splitter.
*/
public long getBlocksize() {
return 1024L * swiftRestClient.getBlocksizeKB();
}
  /**
   * Get the partition size in KB, as configured on the REST client;
   * presumably the threshold at which large uploads are split into parts
   * (see uploadFilePart) -confirm against SwiftRestClient.
   * @return the partition size in kilobytes
   */
  public long getPartsizeKB() {
    return swiftRestClient.getPartSizeKB();
  }
  /**
   * Get the buffer size in KB, as configured on the REST client.
   * @return the buffer size in kilobytes
   */
  public int getBufferSizeKB() {
    return swiftRestClient.getBufferSizeKB();
  }
  /**
   * Get the throttle delay, as configured on the REST client; used between
   * bulk operations (see the per-child delay during rename).
   * @return the delay value
   */
  public int getThrottleDelay() {
    return swiftRestClient.getThrottleDelay();
  }
  /**
   * Upload a file/input stream of a specific length.
   *
   * @param path destination path in the swift filesystem
   * @param inputStream input data. This is closed afterwards, always
   * @param length length of the data
   * @throws IOException on a problem
   */
  public void uploadFile(Path path, InputStream inputStream, long length)
      throws IOException {
    // Single-object PUT of the entire stream.
    swiftRestClient.upload(toObjectPath(path), inputStream, length);
  }
/**
* Upload part of a larger file.
*
* @param path destination path
* @param partNumber item number in the path
* @param inputStream input data
* @param length length of the data
* @throws IOException on a problem
*/
public void uploadFilePart(Path path, int partNumber,
InputStream inputStream, long length)
throws IOException {
String stringPath = path.toUri().toString();
String partitionFilename = SwiftUtils.partitionFilenameFromNumber(
partNumber);
if (stringPath.endsWith("/")) {
stringPath = stringPath.concat(partitionFilename);
} else {
stringPath = stringPath.concat("/").concat(partitionFilename);
}
swiftRestClient.upload(
new SwiftObjectPath(toDirPath(path).getContainer(), stringPath),
inputStream,
length);
}
/**
* Tell the Swift server to expect a multi-part upload by submitting
* a 0-byte file with the X-Object-Manifest header
*
* @param path path of final final
* @throws IOException
*/
public void createManifestForPartUpload(Path path) throws IOException {
String pathString = toObjectPath(path).toString();
if (!pathString.endsWith("/")) {
pathString = pathString.concat("/");
}
if (pathString.startsWith("/")) {
pathString = pathString.substring(1);
}
swiftRestClient.upload(toObjectPath(path),
new ByteArrayInputStream(new byte[0]),
0,
new Header(SwiftProtocolConstants.X_OBJECT_MANIFEST, pathString));
}
  /**
   * Get the metadata of an object, requesting the newest data.
   *
   * @param path path
   * @return file metadata
   * @throws IOException on a problem
   * @throws FileNotFoundException if no headers came back from the server
   *         (a missing object) -this method never returns null
   */
  public SwiftFileStatus getObjectMetadata(Path path) throws IOException {
    return getObjectMetadata(path, true);
  }
/**
* Get the HTTP headers, in case you really need the low-level
* metadata
* @param path path to probe
* @param newest newest or oldest?
* @return the header list
* @throws IOException IO problem
* @throws FileNotFoundException if there is nothing at the end
*/
public Header[] getObjectHeaders(Path path, boolean newest)
throws IOException, FileNotFoundException {
SwiftObjectPath objectPath = toObjectPath(path);
return stat(objectPath, newest);
}
/**
* Get the metadata of an object
*
* @param path path
* @param newest flag to say "set the newest header", otherwise take any entry
* @return file metadata. -or null if no headers were received back from the server.
* @throws IOException on a problem
* @throws FileNotFoundException if there is nothing at the end
*/
public SwiftFileStatus getObjectMetadata(Path path, boolean newest)
throws IOException, FileNotFoundException {
SwiftObjectPath objectPath = toObjectPath(path);
final Header[] headers = stat(objectPath, newest);
//no headers is treated as a missing file
if (headers.length == 0) {
throw new FileNotFoundException("Not Found " + path.toUri());
}
boolean isDir = false;
long length = 0;
long lastModified = 0 ;
for (Header header : headers) {
String headerName = header.getName();
if (headerName.equals(SwiftProtocolConstants.X_CONTAINER_OBJECT_COUNT) ||
headerName.equals(SwiftProtocolConstants.X_CONTAINER_BYTES_USED)) {
length = 0;
isDir = true;
}
if (SwiftProtocolConstants.HEADER_CONTENT_LENGTH.equals(headerName)) {
length = Long.parseLong(header.getValue());
}
if (SwiftProtocolConstants.HEADER_LAST_MODIFIED.equals(headerName)) {
final SimpleDateFormat simpleDateFormat = new SimpleDateFormat(PATTERN);
try {
lastModified = simpleDateFormat.parse(header.getValue()).getTime();
} catch (ParseException e) {
throw new SwiftException("Failed to parse " + header.toString(), e);
}
}
}
if (lastModified == 0) {
lastModified = System.currentTimeMillis();
}
Path correctSwiftPath = getCorrectSwiftPath(path);
return new SwiftFileStatus(length,
isDir,
1,
getBlocksize(),
lastModified,
correctSwiftPath);
}
private Header[] stat(SwiftObjectPath objectPath, boolean newest) throws
IOException {
Header[] headers;
if (newest) {
headers = swiftRestClient.headRequest("getObjectMetadata-newest",
objectPath, SwiftRestClient.NEWEST);
} else {
headers = swiftRestClient.headRequest("getObjectMetadata",
objectPath);
}
return headers;
}
  /**
   * Get the object as an input stream
   *
   * @param path object path
   * @return the input stream -this must be closed to terminate the connection
   * @throws IOException IO problems
   * @throws FileNotFoundException path doesn't resolve to an object
   */
  public HttpBodyContent getObject(Path path) throws IOException {
    // Request the newest copy of the object.
    return swiftRestClient.getData(toObjectPath(path),
        SwiftRestClient.NEWEST);
  }
  /**
   * Get the input stream starting from a specific point.
   *
   * @param path path to object
   * @param byteRangeStart starting point
   * @param length no. of bytes
   * @return an input stream that must be closed
   * @throws IOException IO problems
   */
  public HttpBodyContent getObject(Path path, long byteRangeStart, long length)
      throws IOException {
    // Ranged GET; used for seek/partial reads.
    return swiftRestClient.getData(
        toObjectPath(path), byteRangeStart, length);
  }
/**
* List a directory.
* This is O(n) for the number of objects in this path.
*
*
*
* @param path working path
* @param listDeep ask for all the data
* @param newest ask for the newest data
* @return Collection of file statuses
* @throws IOException IO problems
* @throws FileNotFoundException if the path does not exist
*/
private List<FileStatus> listDirectory(SwiftObjectPath path,
boolean listDeep,
boolean newest) throws IOException {
final byte[] bytes;
final ArrayList<FileStatus> files = new ArrayList<FileStatus>();
final Path correctSwiftPath = getCorrectSwiftPath(path);
try {
bytes = swiftRestClient.listDeepObjectsInDirectory(path, listDeep);
} catch (FileNotFoundException e) {
if (LOG.isDebugEnabled()) {
LOG.debug("" +
"File/Directory not found " + path);
}
if (SwiftUtils.isRootDir(path)) {
return Collections.emptyList();
} else {
throw e;
}
} catch (SwiftInvalidResponseException e) {
//bad HTTP error code
if (e.getStatusCode() == HttpStatus.SC_NO_CONTENT) {
//this can come back on a root list if the container is empty
if (SwiftUtils.isRootDir(path)) {
return Collections.emptyList();
} else {
//NO_CONTENT returned on something other than the root directory;
//see if it is there, and convert to empty list or not found
//depending on whether the entry exists.
FileStatus stat = getObjectMetadata(correctSwiftPath, newest);
if (stat.isDirectory()) {
//it's an empty directory. state that
return Collections.emptyList();
} else {
//it's a file -return that as the status
files.add(stat);
return files;
}
}
} else {
//a different status code: rethrow immediately
throw e;
}
}
final CollectionType collectionType = JSONUtil.getJsonMapper().getTypeFactory().
constructCollectionType(List.class, SwiftObjectFileStatus.class);
final List<SwiftObjectFileStatus> fileStatusList =
JSONUtil.toObject(new String(bytes), collectionType);
//this can happen if user lists file /data/files/file
//in this case swift will return empty array
if (fileStatusList.isEmpty()) {
SwiftFileStatus objectMetadata = getObjectMetadata(correctSwiftPath,
newest);
if (objectMetadata.isFile()) {
files.add(objectMetadata);
}
return files;
}
for (SwiftObjectFileStatus status : fileStatusList) {
if (status.getName() != null) {
files.add(new SwiftFileStatus(status.getBytes(),
status.getBytes() == 0,
1,
getBlocksize(),
status.getLast_modified().getTime(),
getCorrectSwiftPath(new Path(status.getName()))));
}
}
return files;
}
/**
* List all elements in this directory
*
*
*
* @param path path to work with
* @param recursive do a recursive get
* @param newest ask for the newest, or can some out of date data work?
* @return the file statuses, or an empty array if there are no children
* @throws IOException on IO problems
* @throws FileNotFoundException if the path is nonexistent
*/
public FileStatus[] listSubPaths(Path path,
boolean recursive,
boolean newest) throws IOException {
final Collection<FileStatus> fileStatuses;
fileStatuses = listDirectory(toDirPath(path), recursive, newest);
return fileStatuses.toArray(new FileStatus[fileStatuses.size()]);
}
  /**
   * Create a directory.
   *
   * @param path path
   * @throws IOException on a problem
   */
  public void createDirectory(Path path) throws IOException {
    innerCreateDirectory(toDirPath(path));
  }
  /**
   * The inner directory creation option. This only creates
   * the dir at the given path, not any parent dirs.
   * @param swiftObjectPath swift object path at which a 0-byte blob should be
   * put
   * @throws IOException IO problems
   */
  private void innerCreateDirectory(SwiftObjectPath swiftObjectPath)
      throws IOException {
    // Directories are modeled as 0-byte marker objects.
    swiftRestClient.putRequest(swiftObjectPath);
  }
  /**
   * Map a Hadoop path to a Swift object path using the directory variant of
   * the mapping (third argument false; see SwiftObjectPath.fromPath for its
   * exact meaning -presumably "do not treat as an object").
   * @param path hadoop path
   * @return the swift object path
   * @throws SwiftConfigurationException on an invalid filesystem URI
   */
  private SwiftObjectPath toDirPath(Path path) throws
      SwiftConfigurationException {
    return SwiftObjectPath.fromPath(uri, path, false);
  }
  /**
   * Map a Hadoop path to a Swift object path.
   * @param path hadoop path
   * @return the swift object path
   * @throws SwiftConfigurationException on an invalid filesystem URI
   */
  private SwiftObjectPath toObjectPath(Path path) throws
      SwiftConfigurationException {
    return SwiftObjectPath.fromPath(uri, path);
  }
/**
* Try to find the specific server(s) on which the data lives
* @param path path to probe
* @return a possibly empty list of locations
* @throws IOException on problems determining the locations
*/
public List<URI> getObjectLocation(Path path) throws IOException {
final byte[] objectLocation;
objectLocation = swiftRestClient.getObjectLocation(toObjectPath(path));
if (objectLocation == null || objectLocation.length == 0) {
//no object location, return an empty list
return new LinkedList<URI>();
}
return extractUris(new String(objectLocation), path);
}
/**
* deletes object from Swift
*
* @param path path to delete
* @return true if the path was deleted by this specific operation.
* @throws IOException on a failure
*/
public boolean deleteObject(Path path) throws IOException {
SwiftObjectPath swiftObjectPath = toObjectPath(path);
if (!SwiftUtils.isRootDir(swiftObjectPath)) {
return swiftRestClient.delete(swiftObjectPath);
} else {
if (LOG.isDebugEnabled()) {
LOG.debug("Not deleting root directory entry");
}
return true;
}
}
  /**
   * deletes a directory from Swift. This is not recursive
   *
   * @param path path to delete
   * @return true if the path was deleted by this specific operation -or
   *         the path was root and not acted on.
   * @throws IOException on a failure
   */
  public boolean rmdir(Path path) throws IOException {
    // A directory is a 0-byte marker object, so deletion is identical.
    return deleteObject(path);
  }
  /**
   * Does the object exist
   *
   * @param path object path
   * @return true if the metadata of an object could be retrieved
   * @throws IOException IO problems other than FileNotFound, which
   *                     is downgraded to an object does not exist return code
   */
  public boolean objectExists(Path path) throws IOException {
    return objectExists(toObjectPath(path));
  }
/**
* Does the object exist
*
* @param path swift object path
* @return true if the metadata of an object could be retrieved
* @throws IOException IO problems other than FileNotFound, which
* is downgraded to an object does not exist return code
*/
public boolean objectExists(SwiftObjectPath path) throws IOException {
try {
Header[] headers = swiftRestClient.headRequest("objectExists",
path,
SwiftRestClient.NEWEST);
//no headers is treated as a missing file
return headers.length != 0;
} catch (FileNotFoundException e) {
return false;
}
}
/**
* Rename through copy-and-delete. this is a consequence of the
* Swift filesystem using the path as the hash
* into the Distributed Hash Table, "the ring" of filenames.
* <p/>
* Because of the nature of the operation, it is not atomic.
*
* @param src source file/dir
* @param dst destination
* @throws IOException IO failure
* @throws SwiftOperationFailedException if the rename failed
* @throws FileNotFoundException if the source directory is missing, or
* the parent directory of the destination
*/
public void rename(Path src, Path dst)
throws FileNotFoundException, SwiftOperationFailedException, IOException {
if (LOG.isDebugEnabled()) {
LOG.debug("mv " + src + " " + dst);
}
boolean renamingOnToSelf = src.equals(dst);
SwiftObjectPath srcObject = toObjectPath(src);
SwiftObjectPath destObject = toObjectPath(dst);
if (SwiftUtils.isRootDir(srcObject)) {
throw new SwiftOperationFailedException("cannot rename root dir");
}
final SwiftFileStatus srcMetadata;
srcMetadata = getObjectMetadata(src);
SwiftFileStatus dstMetadata;
try {
dstMetadata = getObjectMetadata(dst);
} catch (FileNotFoundException e) {
//destination does not exist.
LOG.debug("Destination does not exist");
dstMetadata = null;
}
//check to see if the destination parent directory exists
Path srcParent = src.getParent();
Path dstParent = dst.getParent();
//skip the overhead of a HEAD call if the src and dest share the same
//parent dir (in which case the dest dir exists), or the destination
//directory is root, in which case it must also exist
if (dstParent != null && !dstParent.equals(srcParent)) {
try {
getObjectMetadata(dstParent);
} catch (FileNotFoundException e) {
//destination parent doesn't exist; bail out
LOG.debug("destination parent directory " + dstParent + " doesn't exist");
throw e;
}
}
boolean destExists = dstMetadata != null;
boolean destIsDir = destExists && SwiftUtils.isDirectory(dstMetadata);
//calculate the destination
SwiftObjectPath destPath;
//enum the child entries and everything underneath
List<FileStatus> childStats = listDirectory(srcObject, true, true);
boolean srcIsFile = !srcMetadata.isDir();
if (srcIsFile) {
//source is a simple file OR a partitioned file
// outcomes:
// #1 dest exists and is file: fail
// #2 dest exists and is dir: destination path becomes under dest dir
// #3 dest does not exist: use dest as name
if (destExists) {
if (destIsDir) {
//outcome #2 -move to subdir of dest
destPath = toObjectPath(new Path(dst, src.getName()));
} else {
//outcome #1 dest it's a file: fail if differeent
if (!renamingOnToSelf) {
throw new FileAlreadyExistsException(
"cannot rename a file over one that already exists");
} else {
//is mv self self where self is a file. this becomes a no-op
LOG.debug("Renaming file onto self: no-op => success");
return;
}
}
} else {
//outcome #3 -new entry
destPath = toObjectPath(dst);
}
int childCount = childStats.size();
//here there is one of:
// - a single object ==> standard file
// ->
if (childCount == 0) {
copyThenDeleteObject(srcObject, destPath);
} else {
//do the copy
SwiftUtils.debug(LOG, "Source file appears to be partitioned." +
" copying file and deleting children");
copyObject(srcObject, destPath);
for (FileStatus stat : childStats) {
SwiftUtils.debug(LOG, "Deleting partitioned file %s ", stat);
deleteObject(stat.getPath());
}
swiftRestClient.delete(srcObject);
}
} else {
//here the source exists and is a directory
// outcomes (given we know the parent dir exists if we get this far)
// #1 destination is a file: fail
// #2 destination is a directory: create a new dir under that one
// #3 destination doesn't exist: create a new dir with that name
// #3 and #4 are only allowed if the dest path is not == or under src
if (destExists && !destIsDir) {
// #1 destination is a file: fail
throw new FileAlreadyExistsException(
"the source is a directory, but not the destination");
}
Path targetPath;
if (destExists) {
// #2 destination is a directory: create a new dir under that one
targetPath = new Path(dst, src.getName());
} else {
// #3 destination doesn't exist: create a new dir with that name
targetPath = dst;
}
SwiftObjectPath targetObjectPath = toObjectPath(targetPath);
//final check for any recursive operations
if (srcObject.isEqualToOrParentOf(targetObjectPath)) {
//you can't rename a directory onto itself
throw new SwiftOperationFailedException(
"cannot move a directory under itself");
}
LOG.info("mv " + srcObject + " " + targetPath);
logDirectory("Directory to copy ", srcObject, childStats);
// iterative copy of everything under the directory.
// by listing all children this can be done iteratively
// rather than recursively -everything in this list is either a file
// or a 0-byte-len file pretending to be a directory.
String srcURI = src.toUri().toString();
int prefixStripCount = srcURI.length() + 1;
for (FileStatus fileStatus : childStats) {
Path copySourcePath = fileStatus.getPath();
String copySourceURI = copySourcePath.toUri().toString();
String copyDestSubPath = copySourceURI.substring(prefixStripCount);
Path copyDestPath = new Path(targetPath, copyDestSubPath);
if (LOG.isTraceEnabled()) {
//trace to debug some low-level rename path problems; retained
//in case they ever come back.
LOG.trace("srcURI=" + srcURI
+ "; copySourceURI=" + copySourceURI
+ "; copyDestSubPath=" + copyDestSubPath
+ "; copyDestPath=" + copyDestPath);
}
SwiftObjectPath copyDestination = toObjectPath(copyDestPath);
try {
copyThenDeleteObject(toObjectPath(copySourcePath),
copyDestination);
} catch (FileNotFoundException e) {
LOG.info("Skipping rename of " + copySourcePath);
}
//add a throttle delay
throttle();
}
//now rename self. If missing, create the dest directory and warn
if (!SwiftUtils.isRootDir(srcObject)) {
try {
copyThenDeleteObject(srcObject,
targetObjectPath);
} catch (FileNotFoundException e) {
//create the destination directory
LOG.warn("Source directory deleted during rename", e);
innerCreateDirectory(destObject);
}
}
}
}
/**
 * Debug action to dump directory statuses to the debug log.
 *
 * @param message explanation
 * @param objectPath object path (can be null)
 * @param statuses listing output
 */
private void logDirectory(String message, SwiftObjectPath objectPath,
                          Iterable<FileStatus> statuses) {
  if (!LOG.isDebugEnabled()) {
    return;
  }
  LOG.debug(message + ": listing of " + objectPath);
  for (FileStatus entry : statuses) {
    LOG.debug(entry.getPath());
  }
}
/**
 * Copy the object at one path to another. The source object is retained.
 *
 * @param srcKey source path
 * @param dstKey destination path
 * @throws IOException IO problems
 */
public void copy(Path srcKey, Path dstKey) throws IOException {
  swiftRestClient.copyObject(toObjectPath(srcKey), toObjectPath(dstKey));
}
/**
 * Copy an object and, only if the copy succeeded, delete the source.
 * If the copy failed, the source object is left untouched.
 *
 * @param srcObject source object path
 * @param destObject destination object path
 * @throws IOException IO problems
 */
private void copyThenDeleteObject(SwiftObjectPath srcObject,
                                  SwiftObjectPath destObject) throws IOException {
  // copyObject() raises an exception on failure, so reaching the
  // delete below implies the copy succeeded.
  copyObject(srcObject, destObject);
  swiftRestClient.delete(srcObject);
}
/**
 * Copy an object.
 *
 * @param srcObject source object path
 * @param destObject destination object path
 * @throws SwiftException if the destination is equal to or under the source,
 * or if the REST client reports the copy failed
 * @throws IOException IO problems
 */
private void copyObject(SwiftObjectPath srcObject,
                        SwiftObjectPath destObject) throws
        IOException {
  if (srcObject.isEqualToOrParentOf(destObject)) {
    throw new SwiftException(
      "Can't copy " + srcObject + " onto " + destObject);
  }
  //do the copy
  boolean copySucceeded = swiftRestClient.copyObject(srcObject, destObject);
  if (!copySucceeded) {
    // Fix: the original message lacked a space before "failed",
    // producing e.g. "Copy of /a to /bfailed".
    throw new SwiftException("Copy of " + srcObject + " to "
            + destObject + " failed");
  }
}
/**
 * Take a Hadoop path and return one which uses the URI prefix and authority
 * of this FS. It doesn't make a relative path absolute.
 *
 * @param path path in
 * @return path with a URI bound to this FS
 * @throws SwiftException URI cannot be created.
 */
public Path getCorrectSwiftPath(Path path) throws
        SwiftException {
  try {
    // Rebind the path to this filesystem's scheme and authority,
    // discarding any query/fragment components.
    URI rebound = new URI(uri.getScheme(),
                          uri.getAuthority(),
                          path.toUri().getPath(),
                          null,
                          null);
    return new Path(rebound);
  } catch (URISyntaxException e) {
    throw new SwiftException("Specified path " + path + " is incorrect", e);
  }
}
/**
 * Builds a hadoop-Path from a swift path, inserting the URI authority
 * of this FS instance.
 *
 * @param path swift object path
 * @return Hadoop path
 * @throws SwiftException if the URI couldn't be created.
 */
private Path getCorrectSwiftPath(SwiftObjectPath path) throws
        SwiftException {
  try {
    // Use the raw object name as the URI path; no query or fragment.
    URI rebound = new URI(uri.getScheme(),
                          uri.getAuthority(),
                          path.getObject(),
                          null,
                          null);
    return new Path(rebound);
  } catch (URISyntaxException e) {
    throw new SwiftException("Specified path " + path + " is incorrect", e);
  }
}
/**
 * Extracts URIs from a JSON document.
 *
 * @param json json to parse
 * @param path path (used in exceptions)
 * @return the URIs found in the document
 * @throws SwiftOperationFailedException on any problem parsing the JSON
 */
public static List<URI> extractUris(String json, Path path) throws
        SwiftOperationFailedException {
  final Matcher matcher = URI_PATTERN.matcher(json);
  final List<URI> result = new ArrayList<URI>();
  while (matcher.find()) {
    final String s = matcher.group();
    // Strip the surrounding quote characters included in the match.
    final String uri = s.substring(1, s.length() - 1);
    try {
      URI createdUri = URI.create(uri);
      result.add(createdUri);
    } catch (IllegalArgumentException e) {
      //failure to create the URI, which means this is bad JSON. Convert
      //to an exception with useful text.
      //Fix: clamp the diagnostic snippet length -- substring(0, 256) on a
      //document shorter than 256 chars would itself throw
      //StringIndexOutOfBoundsException and mask the real problem.
      throw new SwiftOperationFailedException(
        String.format(
          "could not convert \"%s\" into a URI." +
          " source: %s " +
          " first JSON: %s",
          uri, path, json.substring(0, Math.min(256, json.length()))));
    }
  }
  return result;
}
/**
 * Insert a throttled wait if the throttle delay &gt; 0.
 *
 * @throws InterruptedIOException if interrupted during sleep
 */
public void throttle() throws InterruptedIOException {
  int throttleDelay = getThrottleDelay();
  if (throttleDelay > 0) {
    try {
      Thread.sleep(throttleDelay);
    } catch (InterruptedException e) {
      //restore the thread's interrupt status so callers further up the
      //stack can still observe the interruption
      Thread.currentThread().interrupt();
      //convert to an IOE
      throw (InterruptedIOException) new InterruptedIOException(e.toString())
              .initCause(e);
    }
  }
}
/**
 * Get the current operation statistics.
 *
 * @return a snapshot of the duration statistics gathered by the REST client
 * for all operations performed so far
 */
public List<DurationStats> getOperationStatistics() {
  return swiftRestClient.getOperationStatistics();
}
/**
 * Delete the entire tree. This is an internal one with slightly different
 * behavior: if an entry is missing, a {@link FileNotFoundException} is
 * raised. This lets the caller distinguish a file not found from
 * other reasons for failure, so handles race conditions in recursive
 * directory deletes better.
 * <p/>
 * The problem being addressed is: caller A requests a recursive delete
 * of directory /dir ; caller B requests a delete of a file /dir/file,
 * between caller A enumerating the files contents, and requesting a delete
 * of /dir/file. We want to recognise the special case
 * "directed file is no longer there" and not convert that into a failure
 *
 * @param absolutePath the path to delete.
 * @param recursive if path is a directory and set to
 * true, the directory is deleted else throws an exception if the
 * directory is not empty
 * case of a file the recursive can be set to either true or false.
 * @return true if the object was deleted
 * @throws IOException IO problems
 * @throws FileNotFoundException if a file/dir being deleted is not there -
 * this includes entries below the specified path, (if the path is a dir
 * and recursive is true)
 */
public boolean delete(Path absolutePath, boolean recursive) throws IOException {
  Path swiftPath = getCorrectSwiftPath(absolutePath);
  SwiftUtils.debug(LOG, "Deleting path '%s' recursive=%b",
                   absolutePath,
                   recursive);
  boolean askForNewest = true;
  // NOTE(review): the comment below says "don't demand the newest", yet
  // askForNewest is true here -- confirm which behavior is intended.
  SwiftFileStatus fileStatus = getObjectMetadata(swiftPath, askForNewest);
  //ask for the file/dir status, but don't demand the newest, as we
  //don't mind if the directory has changed
  //list all entries under this directory.
  //this will throw FileNotFoundException if the file isn't there
  FileStatus[] statuses = listSubPaths(absolutePath, true, askForNewest);
  if (statuses == null) {
    //the directory went away during the non-atomic stages of the operation.
    // Return false as it was not this thread doing the deletion.
    SwiftUtils.debug(LOG, "Path '%s' has no status -it has 'gone away'",
                     absolutePath,
                     recursive);
    return false;
  }
  int filecount = statuses.length;
  SwiftUtils.debug(LOG, "Path '%s' %d status entries'",
                   absolutePath,
                   filecount);
  if (filecount == 0) {
    //it's an empty directory or a path
    rmdir(absolutePath);
    return true;
  }
  if (LOG.isDebugEnabled()) {
    SwiftUtils.debug(LOG, "%s", SwiftUtils.fileStatsToString(statuses, "\n"));
  }
  if (filecount == 1 && swiftPath.equals(statuses[0].getPath())) {
    // 1 entry => simple file and it is the target
    //simple file: delete it
    SwiftUtils.debug(LOG, "Deleting simple file %s", absolutePath);
    deleteObject(absolutePath);
    return true;
  }
  //>1 entry implies directory with children. Run through them,
  // but first check for the recursive flag and reject it *unless it looks
  // like a partitioned file (len > 0 && has children)
  if (!fileStatus.isDir()) {
    LOG.debug("Multiple child entries but entry has data: assume partitioned");
  } else if (!recursive) {
    //if there are children, unless this is a recursive operation, fail immediately
    throw new SwiftOperationFailedException("Directory " + fileStatus
                                            + " is not empty: "
                                            + SwiftUtils.fileStatsToString(
                                                      statuses, "; "));
  }
  //delete the entries. including ourself.
  for (FileStatus entryStatus : statuses) {
    Path entryPath = entryStatus.getPath();
    try {
      boolean deleted = deleteObject(entryPath);
      if (!deleted) {
        SwiftUtils.debug(LOG, "Failed to delete entry '%s'; continuing",
                         entryPath);
      }
    } catch (FileNotFoundException e) {
      //the path went away -race conditions.
      //do not fail, as the outcome is still OK.
      SwiftUtils.debug(LOG, "Path '%s' is no longer present; continuing",
                       entryPath);
    }
    //rate-limit the bulk delete so the store isn't overloaded
    throttle();
  }
  //now delete self
  SwiftUtils.debug(LOG, "Deleting base entry %s", absolutePath);
  deleteObject(absolutePath);
  return true;
}
}
// ==== apache/juneau : juneau-core/juneau-marshall/src/main/java/org/apache/juneau/xml/XmlParserSession.java ====
// ***************************************************************************************************************************
// * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file *
// * distributed with this work for additional information regarding copyright ownership. The ASF licenses this file *
// * to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance *
// * with the License. You may obtain a copy of the License at *
// * *
// * http://www.apache.org/licenses/LICENSE-2.0 *
// * *
// * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an *
// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the *
// * specific language governing permissions and limitations under the License. *
// ***************************************************************************************************************************
package org.apache.juneau.xml;
import static javax.xml.stream.XMLStreamConstants.*;
import static org.apache.juneau.common.utils.StringUtils.*;
import static org.apache.juneau.common.utils.Utils.*;
import static org.apache.juneau.xml.annotation.XmlFormat.*;
import java.io.*;
import java.lang.reflect.*;
import java.nio.charset.*;
import java.util.*;
import java.util.function.*;
import javax.xml.stream.*;
import javax.xml.stream.util.*;
import org.apache.juneau.*;
import org.apache.juneau.collections.*;
import org.apache.juneau.common.utils.*;
import org.apache.juneau.httppart.*;
import org.apache.juneau.internal.*;
import org.apache.juneau.parser.*;
import org.apache.juneau.swap.*;
import org.apache.juneau.xml.annotation.*;
/**
* Session object that lives for the duration of a single use of {@link XmlParser}.
*
* <h5 class='section'>Notes:</h5><ul>
* <li class='warn'>This class is not thread safe and is typically discarded after one use.
* </ul>
*
* <h5 class='section'>See Also:</h5><ul>
* <li class='link'><a class="doclink" href="https://juneau.apache.org/docs/topics/XmlBasics">XML Basics</a>
* </ul>
*/
@SuppressWarnings({ "unchecked", "rawtypes" })
public class XmlParserSession extends ReaderParserSession {
//-------------------------------------------------------------------------------------------------------------------
// Static
//-------------------------------------------------------------------------------------------------------------------
private static final int UNKNOWN=0, OBJECT=1, ARRAY=2, STRING=3, NUMBER=4, BOOLEAN=5, NULL=6;
/**
 * Creates a new builder for this object.
 *
 * @param ctx The context creating this session.
 * @return A new builder.
 */
public static Builder create(XmlParser ctx) {
	Builder builder = new Builder(ctx);
	return builder;
}
//-------------------------------------------------------------------------------------------------------------------
// Builder
//-------------------------------------------------------------------------------------------------------------------
/**
 * Builder class.
 *
 * <p>
 * Constructs {@link XmlParserSession} instances.  The fluent setters below
 * are code-generated overrides that narrow each inherited setter's return
 * type to this builder; do not edit them by hand.
 */
@FluentSetters
public static class Builder extends ReaderParserSession.Builder {

	// The owning parser context; handed to the session constructor.
	XmlParser ctx;

	/**
	 * Constructor
	 *
	 * @param ctx The context creating this session.
	 */
	protected Builder(XmlParser ctx) {
		super(ctx);
		this.ctx = ctx;
	}

	/**
	 * Builds the session.
	 *
	 * @return A new {@link XmlParserSession} backed by this builder's settings.
	 */
	@Override
	public XmlParserSession build() {
		return new XmlParserSession(this);
	}

	// <FluentSetters>

	@Override /* GENERATED - org.apache.juneau.ContextSession.Builder */
	public <T> Builder apply(Class<T> type, Consumer<T> apply) {
		super.apply(type, apply);
		return this;
	}

	@Override /* GENERATED - org.apache.juneau.ContextSession.Builder */
	public Builder debug(Boolean value) {
		super.debug(value);
		return this;
	}

	@Override /* GENERATED - org.apache.juneau.ContextSession.Builder */
	public Builder properties(Map<String,Object> value) {
		super.properties(value);
		return this;
	}

	@Override /* GENERATED - org.apache.juneau.ContextSession.Builder */
	public Builder property(String key, Object value) {
		super.property(key, value);
		return this;
	}

	@Override /* GENERATED - org.apache.juneau.ContextSession.Builder */
	public Builder unmodifiable() {
		super.unmodifiable();
		return this;
	}

	@Override /* GENERATED - org.apache.juneau.BeanSession.Builder */
	public Builder locale(Locale value) {
		super.locale(value);
		return this;
	}

	@Override /* GENERATED - org.apache.juneau.BeanSession.Builder */
	public Builder localeDefault(Locale value) {
		super.localeDefault(value);
		return this;
	}

	@Override /* GENERATED - org.apache.juneau.BeanSession.Builder */
	public Builder mediaType(MediaType value) {
		super.mediaType(value);
		return this;
	}

	@Override /* GENERATED - org.apache.juneau.BeanSession.Builder */
	public Builder mediaTypeDefault(MediaType value) {
		super.mediaTypeDefault(value);
		return this;
	}

	@Override /* GENERATED - org.apache.juneau.BeanSession.Builder */
	public Builder timeZone(TimeZone value) {
		super.timeZone(value);
		return this;
	}

	@Override /* GENERATED - org.apache.juneau.BeanSession.Builder */
	public Builder timeZoneDefault(TimeZone value) {
		super.timeZoneDefault(value);
		return this;
	}

	@Override /* GENERATED - org.apache.juneau.parser.ParserSession.Builder */
	public Builder javaMethod(Method value) {
		super.javaMethod(value);
		return this;
	}

	@Override /* GENERATED - org.apache.juneau.parser.ParserSession.Builder */
	public Builder outer(Object value) {
		super.outer(value);
		return this;
	}

	@Override /* GENERATED - org.apache.juneau.parser.ParserSession.Builder */
	public Builder schema(HttpPartSchema value) {
		super.schema(value);
		return this;
	}

	@Override /* GENERATED - org.apache.juneau.parser.ParserSession.Builder */
	public Builder schemaDefault(HttpPartSchema value) {
		super.schemaDefault(value);
		return this;
	}

	@Override /* GENERATED - org.apache.juneau.parser.ReaderParserSession.Builder */
	public Builder fileCharset(Charset value) {
		super.fileCharset(value);
		return this;
	}

	@Override /* GENERATED - org.apache.juneau.parser.ReaderParserSession.Builder */
	public Builder streamCharset(Charset value) {
		super.streamCharset(value);
		return this;
	}

	// </FluentSetters>
}
//-------------------------------------------------------------------------------------------------------------------
// Instance
//-------------------------------------------------------------------------------------------------------------------
private final XmlParser ctx;
private final StringBuilder rsb = new StringBuilder(); // Reusable string builder used in this class.
/**
 * Constructor.
 *
 * @param builder The builder for this object.
 */
protected XmlParserSession(Builder builder) {
	super(builder);
	this.ctx = builder.ctx;
}
/**
 * Wrap the specified reader in a STAX reader based on settings in this context.
 *
 * @param pipe The parser input.
 * @return The new STAX reader.
 * @throws IOException Thrown by underlying stream.
 * @throws XMLStreamException Unexpected XML processing error.
 */
protected final XmlReader getXmlReader(ParserPipe pipe) throws IOException, XMLStreamException {
	boolean validating = isValidating();
	return new XmlReader(pipe, validating, getReporter(), getResolver(), getEventAllocator());
}
/**
 * Decodes and trims the specified string.
 *
 * <p>
 * Any <js>'_x####_'</js> sequences in the string will be decoded.
 *
 * @param s The string to be decoded.
 * @return The decoded string, or <jk>null</jk> if the input was <jk>null</jk>.
 */
protected final String decodeString(String s) {
	if (s == null)
		return null;
	// Reuse the shared string builder for the decode scratch space.
	rsb.setLength(0);
	String decoded = XmlUtils.decode(s, rsb);
	return isTrimStrings() ? decoded.trim() : decoded;
}
/*
 * Returns the decoded local name of the current XML element.
 * Any '_x####_' sequences are unescaped.
 */
private String getElementName(XmlReader r) {
	String localName = r.getLocalName();
	return decodeString(localName);
}
/*
 * Returns the decoded value of the _name attribute on the current element,
 * or null if the attribute is absent.
 */
private String getNameProperty(XmlReader r) {
	String raw = r.getAttributeValue(null, getNamePropertyName());
	return decodeString(raw);
}
/*
 * Returns the decoded local name of the i'th attribute on the current element.
 * Any '_x####_' sequences are unescaped.
 */
private String getAttributeName(XmlReader r, int i) {
	String localName = r.getAttributeLocalName(i);
	return decodeString(localName);
}
/*
 * Returns the decoded value of the i'th attribute on the current element.
 * Any '_x####_' sequences are unescaped.
 */
private String getAttributeValue(XmlReader r, int i) {
	String raw = r.getAttributeValue(i);
	return decodeString(raw);
}
/**
 * Returns the text content of the current XML element.
 *
 * <p>
 * Any <js>'_x####_'</js> sequences in the string will be decoded, and
 * leading/trailing (unencoded) whitespace is trimmed before decoding.
 *
 * @param r The reader to read the element text from.
 * @return The decoded text. <jk>null</jk> if the text consists of the sequence <js>'_x0000_'</js>.
 * @throws XMLStreamException Thrown by underlying reader.
 * @throws IOException Thrown by underlying stream.
 * @throws ParseException Malformed input encountered.
 */
protected String getElementText(XmlReader r) throws XMLStreamException, IOException, ParseException {
	String text = r.getElementText();
	return decodeString(text.trim());
}
/*
 * Returns the decoded content of the current CHARACTERS node, optionally
 * trimmed. Returns null when the (possibly trimmed) text is empty.
 */
private String getText(XmlReader r, boolean trim) {
	String text = r.getText();
	if (trim)
		text = text.trim();
	return text.isEmpty() ? null : decodeString(text);
}
/*
 * Convenience overload equivalent to getText(r, true).
 */
private String getText(XmlReader r) {
	boolean trim = true;
	return getText(r, trim);
}
/*
 * Takes the element being read from the XML stream reader and reconstructs it as XML.
 * Used when reconstructing bean properties of type {@link XmlFormat#XMLTEXT}.
 *
 * Improvement: replaced the magic numbers 1/2 with the statically-imported
 * XMLStreamConstants START_ELEMENT/END_ELEMENT (values 1 and 2), which this
 * file already imports; behavior is unchanged.
 */
private String getElementAsString(XmlReader r) {
	int t = r.getEventType();
	// Only start-element and end-element events can be rendered back to markup.
	if (t != START_ELEMENT && t != END_ELEMENT)
		throw new BasicRuntimeException("Invalid event type on stream reader for elementToString() method: ''{0}''", XmlUtils.toReadableEvent(r));
	rsb.setLength(0);
	rsb.append("<").append(t == START_ELEMENT ? "" : "/").append(r.getLocalName());
	if (t == START_ELEMENT)
		for (int i = 0; i < r.getAttributeCount(); i++)
			rsb.append(' ').append(r.getAttributeName(i)).append('=').append('\'').append(r.getAttributeValue(i)).append('\'');
	rsb.append('>');
	return rsb.toString();
}
/**
 * Parses the current element as text.
 *
 * <p>
 * Unlike {@link #getText(XmlReader)}, the reader is assumed to be positioned
 * on an element whose entire contents -- including any child elements,
 * reconstructed as markup -- should be captured as a single string.
 *
 * @param r The input reader.
 * @return The parsed text.
 * @throws XMLStreamException Thrown by underlying reader.
 * @throws IOException Thrown by underlying stream.
 * @throws ParseException Malformed input encountered.
 */
protected String parseText(XmlReader r) throws IOException, XMLStreamException, ParseException {
	StringBuilder sb2 = getStringBuilder();
	int depth = 0;
	while (true) {
		int et = r.getEventType();
		if (et == START_ELEMENT) {
			sb2.append(getElementAsString(r));
			depth++;
		} else if (et == CHARACTERS) {
			// Fix: getText() returns null for whitespace-only content;
			// appending that directly would write the literal "null".
			String text = getText(r);
			if (text != null)
				sb2.append(text);
		} else if (et == END_ELEMENT) {
			sb2.append(getElementAsString(r));
			depth--;
			if (depth <= 0)
				break;
		}
		et = r.next();
	}
	String s = sb2.toString();
	returnStringBuilder(sb2);
	return s;
}
/**
 * Returns <jk>true</jk> if the current element is a whitespace element.
 *
 * <p>
 * For the XML parser, this always returns <jk>false</jk>.
 * However, the HTML parser defines various whitespace elements such as <js>"br"</js> and <js>"sp"</js>.
 *
 * <p>
 * Intended to be overridden by subclasses that have a concept of whitespace elements.
 *
 * @param r The XML stream reader to read the current event from.
 * @return <jk>true</jk> if the current element is a whitespace element.
 */
protected boolean isWhitespaceElement(XmlReader r) {
	return false;
}
/**
 * Parses the current whitespace element.
 *
 * <p>
 * For the XML parser, this always returns <jk>null</jk> since there is no concept of a whitespace element.
 * However, the HTML parser defines various whitespace elements such as <js>"br"</js> and <js>"sp"</js>.
 *
 * <p>
 * Only called when {@link #isWhitespaceElement(XmlReader)} returns <jk>true</jk>,
 * which for this class is never.
 *
 * @param r The XML stream reader to read the current event from.
 * @return The whitespace character or characters.
 * @throws XMLStreamException Thrown by underlying reader.
 * @throws IOException Thrown by underlying stream.
 * @throws ParseException Malformed input encountered.
 */
protected String parseWhitespaceElement(XmlReader r) throws IOException, XMLStreamException, ParseException {
	return null;
}
@Override /* ParserSession */
protected <T> T doParse(ParserPipe pipe, ClassMeta<T> type) throws IOException, ParseException, ExecutableException {
	try {
		// Root-level parse: no current attribute, no property metadata.
		XmlReader reader = getXmlReader(pipe);
		return parseAnything(type, null, reader, getOuter(), true, null);
	} catch (XMLStreamException e) {
		throw new ParseException(e);
	}
}
@Override /* ReaderParserSession */
protected <K,V> Map<K,V> doParseIntoMap(ParserPipe pipe, Map<K,V> m, Type keyType, Type valueType) throws Exception {
	// Resolve the concrete key/value metadata before delegating.
	ClassMeta cm = getClassMeta(m.getClass(), keyType, valueType);
	ClassMeta k = cm.getKeyType();
	ClassMeta v = cm.getValueType();
	return parseIntoMap(pipe, m, k, v);
}
@Override /* ReaderParserSession */
protected <E> Collection<E> doParseIntoCollection(ParserPipe pipe, Collection<E> c, Type elementType) throws Exception {
	// Resolve the concrete element metadata before delegating.
	ClassMeta cm = getClassMeta(c.getClass(), elementType);
	ClassMeta et = cm.getElementType();
	return parseIntoCollection(pipe, c, et);
}
/**
 * Workhorse method.
 *
 * @param <T> The expected type of object.
 * @param eType The expected type of object.
 * @param currAttr The current bean property name.
 * @param r The reader.
 * @param outer The outer object.
 * @param isRoot If <jk>true</jk>, then we're serializing a root element in the document.
 * @param pMeta The bean property metadata.
 * @return The parsed object.
 * @throws IOException Thrown by underlying stream.
 * @throws ParseException Malformed input encountered.
 * @throws ExecutableException Exception occurred on invoked constructor/method/field.
 * @throws XMLStreamException Malformed XML encountered.
 */
protected <T> T parseAnything(ClassMeta<T> eType, String currAttr, XmlReader r,
		Object outer, boolean isRoot, BeanPropertyMeta pMeta) throws IOException, ParseException, ExecutableException, XMLStreamException {

	if (eType == null)
		eType = (ClassMeta<T>)object();

	// Resolve any swap/builder registered on the expected type;
	// sType is the "serialized" type actually materialized from the document.
	ObjectSwap<T,Object> swap = (ObjectSwap<T,Object>)eType.getSwap(this);
	BuilderSwap<T,Object> builder = (BuilderSwap<T,Object>)eType.getBuilderSwap(this);
	ClassMeta<?> sType = null;
	if (builder != null)
		sType = builder.getBuilderClassMeta(this);
	else if (swap != null)
		sType = swap.getSwapClassMeta(this);
	else
		sType = eType;

	// Optionals: parse the element type and re-wrap the result.
	if (sType.isOptional())
		return (T)Utils.opt(parseAnything(eType.getElementType(), currAttr, r, outer, isRoot, pMeta));

	setCurrentClass(sType);

	// When preserving root elements, the root tag name becomes a wrapper key on the result.
	String wrapperAttr = (isRoot && isPreserveRootElement()) ? r.getName().getLocalPart() : null;
	String typeAttr = r.getAttributeValue(null, getBeanTypePropertyName(eType));
	boolean isNil = "true".equals(r.getAttributeValue(null, "nil"));
	int jsonType = getJsonType(typeAttr);
	String elementName = getElementName(r);

	// No usable type attribute: fall back on the element name as a type hint.
	if (jsonType == 0) {
		if (elementName == null || elementName.equals(currAttr))
			jsonType = UNKNOWN;
		else {
			typeAttr = elementName;
			jsonType = getJsonType(elementName);
		}
	}

	// Try resolving a bean-dictionary type from the type attribute, then the element name.
	ClassMeta tcm = getClassMeta(typeAttr, pMeta, eType);
	if (tcm == null && elementName != null && ! elementName.equals(currAttr))
		tcm = getClassMeta(elementName, pMeta, eType);
	if (tcm != null)
		sType = eType = tcm;

	Object o = null;

	if (jsonType == NULL) {
		r.nextTag();	// Discard end tag
		return null;
	}

	if (sType.isObject()) {
		// Unspecified target type: pick a representation based on the JSON-ish type hint.
		if (jsonType == OBJECT) {
			JsonMap m = new JsonMap(this);
			parseIntoMap(r, m, string(), object(), pMeta);
			if (wrapperAttr != null)
				m = new JsonMap(this).append(wrapperAttr, m);
			o = cast(m, pMeta, eType);
		} else if (jsonType == ARRAY)
			o = parseIntoCollection(r, new JsonList(this), null, pMeta);
		else if (jsonType == STRING) {
			o = getElementText(r);
			if (sType.isChar())
				o = parseCharacter(o);
		}
		else if (jsonType == NUMBER)
			o = parseNumber(getElementText(r), null);
		else if (jsonType == BOOLEAN)
			o = Boolean.parseBoolean(getElementText(r));
		else if (jsonType == UNKNOWN)
			o = getUnknown(r);
	} else if (sType.isBoolean()) {
		o = Boolean.parseBoolean(getElementText(r));
	} else if (sType.isCharSequence()) {
		o = getElementText(r);
	} else if (sType.isChar()) {
		o = parseCharacter(getElementText(r));
	} else if (sType.isMap()) {
		Map m = (sType.canCreateNewInstance(outer) ? (Map)sType.newInstance(outer) : newGenericMap(sType));
		o = parseIntoMap(r, m, sType.getKeyType(), sType.getValueType(), pMeta);
		if (wrapperAttr != null)
			o = new JsonMap(this).append(wrapperAttr, m);
	} else if (sType.isCollection()) {
		Collection l = (sType.canCreateNewInstance(outer) ? (Collection)sType.newInstance(outer) : new JsonList(this));
		o = parseIntoCollection(r, l, sType, pMeta);
	} else if (sType.isNumber()) {
		o = parseNumber(getElementText(r), (Class<? extends Number>)sType.getInnerClass());
	} else if (builder != null || sType.canCreateNewBean(outer)) {
		// Bean (or builder-constructed bean).
		if (getXmlClassMeta(sType).getFormat() == COLLAPSED) {
			// COLLAPSED format: the element maps directly onto a single bean property.
			String fieldName = r.getLocalName();
			BeanMap<?> m = builder != null ? toBeanMap(builder.create(this, eType)) : newBeanMap(outer, sType.getInnerClass());
			BeanPropertyMeta bpm = getXmlBeanMeta(m.getMeta()).getPropertyMeta(fieldName);
			ClassMeta<?> cm = m.getMeta().getClassMeta();
			Object value = parseAnything(cm, currAttr, r, m.getBean(false), false, null);
			setName(cm, value, currAttr);
			bpm.set(m, currAttr, value);
			o = builder != null ? builder.build(this, m.getBean(), eType) : m.getBean();
		} else {
			BeanMap m = builder != null ? toBeanMap(builder.create(this, eType)) : newBeanMap(outer, sType.getInnerClass());
			m = parseIntoBean(r, m, isNil);
			o = builder != null ? builder.build(this, m.getBean(), eType) : m.getBean();
		}
	} else if (sType.isArray() || sType.isArgs()) {
		ArrayList l = (ArrayList)parseIntoCollection(r, list(), sType, pMeta);
		o = toArray(sType, l);
	} else if (sType.canCreateNewInstanceFromString(outer)) {
		o = sType.newInstanceFromString(outer, getElementText(r));
	} else if (sType.getProxyInvocationHandler() != null) {
		// Proxy interface: load a generic map and materialize through the bean map.
		JsonMap m = new JsonMap(this);
		parseIntoMap(r, m, string(), object(), pMeta);
		if (wrapperAttr != null)
			m = new JsonMap(this).append(wrapperAttr, m);
		o = newBeanMap(outer, sType.getInnerClass()).load(m).getBean();
	} else {
		throw new ParseException(this,
			"Class ''{0}'' could not be instantiated. Reason: ''{1}'', property: ''{2}''",
			sType.getInnerClass().getName(), sType.getNotABeanReason(), pMeta == null ? null : pMeta.getName());
	}

	// Convert the serialized form back to the expected type, and wire up parent links.
	if (swap != null && o != null)
		o = unswap(swap, o, eType);

	if (outer != null)
		setParent(eType, o, outer);

	return (T)o;
}
/*
 * Parses the attributes and child elements of the current element into the
 * specified map. Attributes are converted to entries first; then each child
 * element contributes one entry (keyed by its _name attribute or element name).
 */
private <K,V> Map<K,V> parseIntoMap(XmlReader r, Map<K,V> m, ClassMeta<K> keyType,
		ClassMeta<V> valueType, BeanPropertyMeta pMeta) throws IOException, ParseException, ExecutableException, XMLStreamException {
	int depth = 0;
	for (int i = 0; i < r.getAttributeCount(); i++) {
		String a = r.getAttributeLocalName(i);
		// TODO - Need better handling of namespaces here.
		if (! isSpecialAttr(a)) {
			K key = trim(convertAttrToType(m, a, keyType));
			V value = trim(convertAttrToType(m, r.getAttributeValue(i), valueType));
			setName(valueType, value, key);
			m.put(key, value);
		}
	}
	do {
		int event = r.nextTag();
		String currAttr;
		if (event == START_ELEMENT) {
			depth++;
			// The _name attribute, when present, takes precedence over the element name.
			currAttr = getNameProperty(r);
			if (currAttr == null)
				currAttr = getElementName(r);
			K key = convertAttrToType(m, currAttr, keyType);
			V value = parseAnything(valueType, currAttr, r, m, false, pMeta);
			setName(valueType, value, currAttr);
			// Duplicate keys with an unspecified value type collapse into a JsonList.
			if (valueType.isObject() && m.containsKey(key)) {
				Object o = m.get(key);
				if (o instanceof List)
					((List)o).add(value);
				else
					m.put(key, (V)new JsonList(o, value).setBeanSession(this));
			} else {
				m.put(key, value);
			}
		} else if (event == END_ELEMENT) {
			depth--;
			return m;
		}
	} while (depth > 0);
	return m;
}
/*
 * Parses the child elements of the current element into the specified
 * collection. For @Args types, each child is parsed against the next
 * positional argument type.
 */
private <E> Collection<E> parseIntoCollection(XmlReader r, Collection<E> l,
		ClassMeta<?> type, BeanPropertyMeta pMeta) throws IOException, ParseException, ExecutableException, XMLStreamException {
	int depth = 0;
	int argIndex = 0;
	do {
		int event = r.nextTag();
		if (event == END_ELEMENT) {
			depth--;
			return l;
		}
		if (event == START_ELEMENT) {
			depth++;
			ClassMeta<?> elementType;
			if (type == null)
				elementType = object();
			else if (type.isArgs())
				elementType = type.getArg(argIndex++);
			else
				elementType = type.getElementType();
			E value = (E)parseAnything(elementType, null, r, l, false, pMeta);
			l.add(value);
		}
	} while (depth > 0);
	return l;
}
/*
 * Maps a type-attribute value ("object","array","string","number","boolean",
 * "null") to its JSON-type constant, returning UNKNOWN for anything else.
 *
 * Fix: the original threw StringIndexOutOfBoundsException for an empty
 * string (charAt(0)) and for short inputs starting with 'n' such as "n"
 * or "nu" (charAt(2)); both now safely return UNKNOWN.
 */
private static int getJsonType(String s) {
	if (s == null || s.isEmpty())
		return UNKNOWN;
	char c = s.charAt(0);
	switch(c) {
		case 'o': return (s.equals("object") ? OBJECT : UNKNOWN);
		case 'a': return (s.equals("array") ? ARRAY : UNKNOWN);
		case 's': return (s.equals("string") ? STRING : UNKNOWN);
		case 'b': return (s.equals("boolean") ? BOOLEAN : UNKNOWN);
		case 'n': {
			// Peek at the third char to distinguish "number" from "null",
			// guarding the length first.
			if (s.length() > 2) {
				c = s.charAt(2);
				switch(c) {
					case 'm': return (s.equals("number") ? NUMBER : UNKNOWN);
					case 'l': return (s.equals("null") ? NULL : UNKNOWN);
				}
			}
			return UNKNOWN;
		}
	}
	return UNKNOWN;
}
/**
 * Parses the attributes and child content of the current XML element into the specified bean map.
 *
 * <p>Attributes are applied first, then child events are consumed in a depth-tracking loop whose
 * behavior depends on the bean's content-property format (MIXED, TEXT, XMLTEXT, ELEMENTS, ...).
 *
 * @param r The XML stream reader — assumed positioned on the bean's START_ELEMENT (TODO confirm from callers).
 * @param m The bean map to populate.
 * @param isNil <jk>true</jk> if the element was marked nil; collected content is then discarded.
 * @return The same bean map, populated.
 */
private <T> BeanMap<T> parseIntoBean(XmlReader r, BeanMap<T> m, boolean isNil) throws IOException, ParseException, ExecutableException, XMLStreamException {
	BeanMeta<?> bMeta = m.getMeta();
	XmlBeanMeta xmlMeta = getXmlBeanMeta(bMeta);
	// Pass 1:  map XML attributes onto bean properties.
	for (int i = 0; i < r.getAttributeCount(); i++) {
		String key = getAttributeName(r, i);
		if (! ("nil".equals(key) || isSpecialAttr(key))) {
			String val = r.getAttributeValue(i);
			String ns = r.getAttributeNamespace(i);
			BeanPropertyMeta bpm = xmlMeta.getPropertyMeta(key);
			if (bpm == null) {
				if (xmlMeta.getAttrsProperty() != null) {
					// Catch-all attributes property absorbs unmapped attributes.
					xmlMeta.getAttrsProperty().add(m, key, key, val);
				} else if (ns == null) {
					// Only unknown attributes in the default namespace are reported;
					// namespaced attributes are silently ignored.
					onUnknownProperty(key, m, val);
				}
			} else {
				try {
					bpm.set(m, key, val);
				} catch (BeanRuntimeException e) {
					// Give the hook a chance to log/translate, then propagate.
					onBeanSetterException(bpm, e);
					throw e;
				}
			}
		}
	}
	// Pass 2:  consume child events until this element's END_ELEMENT.
	BeanPropertyMeta cp = xmlMeta.getContentProperty();
	XmlFormat cpf = xmlMeta.getContentFormat();
	// Whitespace is preserved only for the *_PWS ("preserve whitespace") formats.
	boolean trim = cp == null || ! cpf.isOneOf(MIXED_PWS, TEXT_PWS);
	ClassMeta<?> cpcm = (cp == null ? object() : cp.getClassMeta());
	StringBuilder sb = null;  // Accumulates text content (TEXT/XMLTEXT and default modes).
	BeanRegistry breg = cp == null ? null : cp.getBeanRegistry();
	LinkedList<Object> l = null;  // Accumulates mixed-content nodes when the content property is a collection/array.
	int depth = 0;  // Relative nesting; the loop exits when an END_ELEMENT drops it below 0.
	do {
		int event = r.next();
		String currAttr;
		// We only care about text in MIXED mode.
		// Ignore if in ELEMENTS mode.
		if (event == CHARACTERS) {
			if (cp != null && cpf.isOneOf(MIXED, MIXED_PWS)) {
				if (cpcm.isCollectionOrArray()) {
					if (l == null)
						l = new LinkedList<>();
					l.add(getText(r, false));
				} else {
					cp.set(m, null, getText(r, trim));
				}
			} else if (cpf != ELEMENTS) {
				String s = getText(r, trim);
				if (s != null) {
					if (sb == null)
						sb = getStringBuilder();
					sb.append(s);
				}
			} else {
				// Do nothing...we're in ELEMENTS mode.
			}
		} else if (event == START_ELEMENT) {
			if (cp != null && cpf.isOneOf(TEXT, TEXT_PWS)) {
				String s = parseText(r);
				if (s != null) {
					if (sb == null)
						sb = getStringBuilder();
					sb.append(s);
				}
				// NOTE(review): presumably parseText() consumes through the matching END_ELEMENT,
				// so this compensates for the END_ELEMENT this loop will not see — TODO confirm.
				depth--;
			} else if (cpf == XMLTEXT) {
				if (sb == null)
					sb = getStringBuilder();
				sb.append(getElementAsString(r));
				depth++;
			} else if (cp != null && cpf.isOneOf(MIXED, MIXED_PWS)) {
				// Whitespace-only elements are kept verbatim unless the bean registry
				// recognizes the element name as a bean type.
				if (isWhitespaceElement(r) && (breg == null || ! breg.hasName(r.getLocalName()))) {
					if (cpcm.isCollectionOrArray()) {
						if (l == null)
							l = new LinkedList<>();
						l.add(parseWhitespaceElement(r));
					} else {
						cp.set(m, null, parseWhitespaceElement(r));
					}
				} else {
					if (cpcm.isCollectionOrArray()) {
						if (l == null)
							l = new LinkedList<>();
						l.add(parseAnything(cpcm.getElementType(), cp.getName(), r, m.getBean(false), false, cp));
					} else {
						cp.set(m, null, parseAnything(cpcm, cp.getName(), r, m.getBean(false), false, cp));
					}
				}
			} else if (cp != null && cpf == ELEMENTS) {
				cp.add(m, null, parseAnything(cpcm.getElementType(), cp.getName(), r, m.getBean(false), false, cp));
			} else {
				// Child element maps to a regular bean property.
				currAttr = getNameProperty(r);
				if (currAttr == null)
					currAttr = getElementName(r);
				BeanPropertyMeta pMeta = xmlMeta.getPropertyMeta(currAttr);
				if (pMeta == null) {
					Object value = parseAnything(object(), currAttr, r, m.getBean(false), false, null);
					onUnknownProperty(currAttr, m, value);
				} else {
					setCurrentProperty(pMeta);
					XmlFormat xf = getXmlBeanPropertyMeta(pMeta).getXmlFormat();
					if (xf == COLLAPSED) {
						// Collapsed collections:  each repeated element is added individually.
						ClassMeta<?> et = pMeta.getClassMeta().getElementType();
						Object value = parseAnything(et, currAttr, r, m.getBean(false), false, pMeta);
						setName(et, value, currAttr);
						pMeta.add(m, currAttr, value);
					} else if (xf == ATTR) {
						pMeta.set(m, currAttr, getAttributeValue(r, 0));
						r.nextTag();
					} else {
						ClassMeta<?> cm = pMeta.getClassMeta();
						Object value = parseAnything(cm, currAttr, r, m.getBean(false), false, pMeta);
						setName(cm, value, currAttr);
						pMeta.set(m, currAttr, value);
					}
					setCurrentProperty(null);
				}
			}
		} else if (event == END_ELEMENT) {
			if (depth > 0) {
				// Nested end tags are only legal in XMLTEXT mode, where raw markup is captured.
				if (cpf == XMLTEXT) {
					if (sb == null)
						sb = getStringBuilder();
					sb.append(getElementAsString(r));
				}
				else
					throw new ParseException("End element found where one was not expected. {0}", XmlUtils.toReadableEvent(r));
			}
			depth--;
		} else if (event == COMMENT) {
			// Ignore comments.
		} else {
			throw new ParseException("Unexpected event type: {0}", XmlUtils.toReadableEvent(r));
		}
	} while (depth >= 0);
	// Flush whatever content was collected into the content property (unless the element was nil).
	if (cp != null && ! isNil) {
		if (sb != null)
			cp.set(m, null, sb.toString());
		else if (l != null)
			cp.set(m, null, XmlUtils.collapseTextNodes(l));
		else if (cpcm.isCollectionOrArray()) {
			// Ensure a collection-typed content property is at least an empty list, never null.
			Object o = cp.get(m, null);
			if (o == null)
				cp.set(m, cp.getName(), list());
		}
	}
	returnStringBuilder(sb);
	return m;
}
/*
 * Returns true if the attribute name is one of the parser-internal attributes
 * (the bean-type property name or the name property name) rather than bean data.
 */
private boolean isSpecialAttr(String key) {
	if (key.equals(getBeanTypePropertyName(null)))
		return true;
	return key.equals(getNamePropertyName());
}
/**
 * Parses an element of unknown type into either a {@link JsonMap} (if it has attributes or child
 * elements) or a plain decoded string (if it contains only text).
 *
 * @param r The XML stream reader, which must be positioned on a START_ELEMENT.
 * @return A {@link JsonMap} or a {@link String}, depending on the element's content.
 * @throws ParseException If the reader is mispositioned or the document ends unexpectedly.
 */
private Object getUnknown(XmlReader r) throws IOException, ParseException, ExecutableException, XMLStreamException {
	if (r.getEventType() != START_ELEMENT) {
		throw new ParseException(this, "Parser must be on START_ELEMENT to read next text.");
	}
	JsonMap m = null;
	// If this element has attributes, then it's always a JsonMap.
	if (r.getAttributeCount() > 0) {
		m = new JsonMap(this);
		for (int i = 0; i < r.getAttributeCount(); i++) {
			String key = getAttributeName(r, i);
			String val = r.getAttributeValue(i);
			if (! isSpecialAttr(key))
				m.put(key, val);
		}
	}
	int eventType = r.next();
	StringBuilder sb = getStringBuilder();
	while (eventType != END_ELEMENT) {
		if (eventType == CHARACTERS || eventType == CDATA || eventType == SPACE || eventType == ENTITY_REFERENCE) {
			sb.append(r.getText());
		} else if (eventType == PROCESSING_INSTRUCTION || eventType == COMMENT) {
			// skipping
		} else if (eventType == END_DOCUMENT) {
			throw new ParseException(this, "Unexpected end of document when reading element text content");
		} else if (eventType == START_ELEMENT) {
			// Oops...this has an element in it.
			// Parse it as a map.
			if (m == null)
				m = new JsonMap(this);
			int depth = 0;
			do {
				// First iteration reuses the START_ELEMENT already read by the outer loop;
				// later iterations (eventType == -1 sentinel) advance the reader.
				int event = (eventType == -1 ? r.nextTag() : eventType);
				String currAttr;
				if (event == START_ELEMENT) {
					depth++;
					currAttr = getNameProperty(r);
					if (currAttr == null)
						currAttr = getElementName(r);
					String key = convertAttrToType(null, currAttr, string());
					Object value = parseAnything(object(), currAttr, r, null, false, null);
					if (m.containsKey(key)) {
						// Repeated element names fold into a JsonList.
						Object o = m.get(key);
						if (o instanceof JsonList)
							((JsonList)o).add(value);
						else
							m.put(key, new JsonList(o, value).setBeanSession(this));
					} else {
						m.put(key, value);
					}
				} else if (event == END_ELEMENT) {
					depth--;
					break;
				}
				eventType = -1;
			} while (depth > 0);
			break;
		} else {
			throw new ParseException(this, "Unexpected event type ''{0}''", eventType);
		}
		eventType = r.next();
	}
	String s = sb.toString().trim();
	returnStringBuilder(sb);
	s = decodeString(s);
	if (m != null) {
		// Element had structure; any text content is stored under the "contents" key.
		if (! s.isEmpty())
			m.put("contents", s);
		return m;
	}
	return s;
}
//-----------------------------------------------------------------------------------------------------------------
// Properties
//-----------------------------------------------------------------------------------------------------------------
/**
 * Returns the XML event allocator configured for this parser session.
 *
 * @see XmlParser.Builder#eventAllocator(Class)
 * @return The {@link XMLEventAllocator} to use, or <jk>null</jk> if none was configured.
 */
protected final XMLEventAllocator getEventAllocator() {
	return this.ctx.getEventAllocator();
}
/**
 * Returns whether the root element is preserved during generalized parsing.
 *
 * @see XmlParser.Builder#preserveRootElement()
 * @return <jk>true</jk> if parsing into a generic {@link JsonMap} produces a map with a single
 * 	entry keyed by the root element name.
 */
protected final boolean isPreserveRootElement() {
	return this.ctx.isPreserveRootElement();
}
/**
 * Returns the XML reporter configured for this parser session.
 *
 * @see XmlParser.Builder#reporter(Class)
 * @return The {@link XMLReporter} to use, or <jk>null</jk> if none was configured.
 */
protected final XMLReporter getReporter() {
	return this.ctx.getReporter();
}
/**
 * Returns the XML resolver configured for this parser session.
 *
 * @see XmlParser.Builder#resolver(Class)
 * @return The {@link XMLResolver} to use, or <jk>null</jk> if none was configured.
 */
protected final XMLResolver getResolver() {
	return this.ctx.getResolver();
}
/**
 * Returns whether XML document validation is enabled for this parser session.
 *
 * @see XmlParser.Builder#validating()
 * @return <jk>true</jk> if the XML document will be validated.
 */
protected final boolean isValidating() {
	return this.ctx.isValidating();
}
//-----------------------------------------------------------------------------------------------------------------
// Extended metadata
//-----------------------------------------------------------------------------------------------------------------
/**
 * Retrieves the XML-specific metadata for the given class.
 *
 * @param cm The class to look up metadata for.
 * @return The XML metadata for the class.
 */
protected XmlClassMeta getXmlClassMeta(ClassMeta<?> cm) {
	return this.ctx.getXmlClassMeta(cm);
}
/**
 * Retrieves the XML-specific metadata for the given bean.
 *
 * @param bm The bean to look up metadata for.
 * @return The XML metadata for the bean.
 */
protected XmlBeanMeta getXmlBeanMeta(BeanMeta<?> bm) {
	return this.ctx.getXmlBeanMeta(bm);
}
/**
 * Retrieves the XML-specific metadata for the given bean property.
 *
 * @param bpm The bean property to look up metadata for.
 * @return The XML metadata for the bean property.
 */
protected XmlBeanPropertyMeta getXmlBeanPropertyMeta(BeanPropertyMeta bpm) {
	return this.ctx.getXmlBeanPropertyMeta(bpm);
}
}
|
googleapis/google-cloud-java | 34,950 | java-privilegedaccessmanager/proto-google-cloud-privilegedaccessmanager-v1/src/main/java/com/google/cloud/privilegedaccessmanager/v1/DeleteEntitlementRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/privilegedaccessmanager/v1/privilegedaccessmanager.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.privilegedaccessmanager.v1;
/**
*
*
* <pre>
* Message for deleting an entitlement.
* </pre>
*
* Protobuf type {@code google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest}
*/
public final class DeleteEntitlementRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest)
DeleteEntitlementRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use DeleteEntitlementRequest.newBuilder() to construct.
private DeleteEntitlementRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// No-arg constructor used for the default instance; string fields default to "".
private DeleteEntitlementRequest() {
  name_ = "";
  requestId_ = "";
}

// Called reflectively by the protobuf runtime to allocate fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new DeleteEntitlementRequest();
}
// Returns the shared proto descriptor for this message type.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.privilegedaccessmanager.v1.PrivilegedAccessManagerProto
      .internal_static_google_cloud_privilegedaccessmanager_v1_DeleteEntitlementRequest_descriptor;
}

// Wires reflective field access (used by the runtime for toString(), reflection, etc.)
// to this class and its builder.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.privilegedaccessmanager.v1.PrivilegedAccessManagerProto
      .internal_static_google_cloud_privilegedaccessmanager_v1_DeleteEntitlementRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest.class,
          com.google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest.Builder.class);
}
public static final int NAME_FIELD_NUMBER = 1;

// Holds either a String or a ByteString; lazily converted and cached on first access.
@SuppressWarnings("serial")
private volatile java.lang.Object name_ = "";

/**
 *
 *
 * <pre>
 * Required. Name of the resource.
 * </pre>
 *
 * <code>
 * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The name.
 */
@java.lang.Override
public java.lang.String getName() {
  java.lang.Object ref = name_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Field still holds the wire-format ByteString; decode once and cache the String.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    name_ = s;
    return s;
  }
}

/**
 *
 *
 * <pre>
 * Required. Name of the resource.
 * </pre>
 *
 * <code>
 * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The bytes for name.
 */
@java.lang.Override
public com.google.protobuf.ByteString getNameBytes() {
  java.lang.Object ref = name_;
  if (ref instanceof java.lang.String) {
    // Field currently holds a String; encode once and cache the ByteString.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    name_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int REQUEST_ID_FIELD_NUMBER = 2;

// Holds either a String or a ByteString; lazily converted and cached on first access.
@SuppressWarnings("serial")
private volatile java.lang.Object requestId_ = "";

/**
 *
 *
 * <pre>
 * Optional. An optional request ID to identify requests. Specify a unique
 * request ID so that if you must retry your request, the server knows to
 * ignore the request if it has already been completed. The server guarantees
 * this for at least 60 minutes after the first request.
 *
 * For example, consider a situation where you make an initial request and the
 * request times out. If you make the request again with the same request
 * ID, the server can check if original operation with the same request ID
 * was received, and if so, ignores the second request.
 *
 * The request ID must be a valid UUID with the exception that zero UUID is
 * not supported (00000000-0000-0000-0000-000000000000).
 * </pre>
 *
 * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The requestId.
 */
@java.lang.Override
public java.lang.String getRequestId() {
  java.lang.Object ref = requestId_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Decode the wire-format ByteString once and cache the String.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    requestId_ = s;
    return s;
  }
}

/**
 *
 *
 * <pre>
 * Optional. An optional request ID to identify requests. Specify a unique
 * request ID so that if you must retry your request, the server knows to
 * ignore the request if it has already been completed. The server guarantees
 * this for at least 60 minutes after the first request.
 *
 * For example, consider a situation where you make an initial request and the
 * request times out. If you make the request again with the same request
 * ID, the server can check if original operation with the same request ID
 * was received, and if so, ignores the second request.
 *
 * The request ID must be a valid UUID with the exception that zero UUID is
 * not supported (00000000-0000-0000-0000-000000000000).
 * </pre>
 *
 * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The bytes for requestId.
 */
@java.lang.Override
public com.google.protobuf.ByteString getRequestIdBytes() {
  java.lang.Object ref = requestId_;
  if (ref instanceof java.lang.String) {
    // Encode the cached String once and cache the ByteString.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    requestId_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int FORCE_FIELD_NUMBER = 3;
private boolean force_ = false;

/**
 *
 *
 * <pre>
 * Optional. If set to true, any child grant under this entitlement is also
 * deleted. (Otherwise, the request only works if the entitlement has no child
 * grant.)
 * </pre>
 *
 * <code>bool force = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The force.
 */
@java.lang.Override
public boolean getForce() {
  return force_;
}

// Memoized initialization state: -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // This message has no required fields, so it is always initialized.
  memoizedIsInitialized = 1;
  return true;
}
// Serializes this message; fields holding their default value (empty string, false)
// are omitted from the wire, per proto3 semantics.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, requestId_);
  }
  if (force_ != false) {
    output.writeBool(3, force_);
  }
  getUnknownFields().writeTo(output);
}

// Computes the serialized byte size, mirroring writeTo()'s field-skipping logic,
// and memoizes the result in memoizedSize.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, requestId_);
  }
  if (force_ != false) {
    size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, force_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
// Field-by-field equality, including unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest)) {
    return super.equals(obj);
  }
  com.google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest other =
      (com.google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest) obj;
  if (!getName().equals(other.getName())) return false;
  if (!getRequestId().equals(other.getRequestId())) return false;
  if (getForce() != other.getForce()) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}

// Hash over the descriptor, every field, and unknown fields; memoized after first computation.
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + NAME_FIELD_NUMBER;
  hash = (53 * hash) + getName().hashCode();
  hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER;
  hash = (53 * hash) + getRequestId().hashCode();
  hash = (37 * hash) + FORCE_FIELD_NUMBER;
  hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getForce());
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard generated parse entry points.  All overloads delegate to the shared PARSER;
// the stream-based variants route through GeneratedMessageV3's IOException-aware helpers.
public static com.google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest parseFrom(
    byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Delimited variants read a length-prefixed message from the stream.
public static com.google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest
    parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest
    parseDelimitedFrom(
        java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

// Creates a fresh builder seeded from the shared default instance.
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

// Creates a builder pre-populated with the given prototype's field values.
public static Builder newBuilder(
    com.google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  // The default instance yields an empty builder; any other instance is copied in.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* Message for deleting an entitlement.
* </pre>
*
* Protobuf type {@code google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest)
com.google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequestOrBuilder {
// Same descriptor/accessor wiring as the enclosing message type.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.privilegedaccessmanager.v1.PrivilegedAccessManagerProto
      .internal_static_google_cloud_privilegedaccessmanager_v1_DeleteEntitlementRequest_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.privilegedaccessmanager.v1.PrivilegedAccessManagerProto
      .internal_static_google_cloud_privilegedaccessmanager_v1_DeleteEntitlementRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest.class,
          com.google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest.Builder.class);
}

// Construct using
// com.google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest.newBuilder()
private Builder() {}

private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  super(parent);
}

// Resets every field to its default and clears the has-been-set bits.
@java.lang.Override
public Builder clear() {
  super.clear();
  bitField0_ = 0;
  name_ = "";
  requestId_ = "";
  force_ = false;
  return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
  return com.google.cloud.privilegedaccessmanager.v1.PrivilegedAccessManagerProto
      .internal_static_google_cloud_privilegedaccessmanager_v1_DeleteEntitlementRequest_descriptor;
}

@java.lang.Override
public com.google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest
    getDefaultInstanceForType() {
  return com.google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest
      .getDefaultInstance();
}

// Builds and verifies initialization (always true here; no required fields are declared).
@java.lang.Override
public com.google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest build() {
  com.google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}

@java.lang.Override
public com.google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest buildPartial() {
  com.google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest result =
      new com.google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest(this);
  // Copy fields only if at least one was explicitly set.
  if (bitField0_ != 0) {
    buildPartial0(result);
  }
  onBuilt();
  return result;
}

// Copies each builder field into the message, gated on its bit in bitField0_
// (bit 0x1 = name, 0x2 = requestId, 0x4 = force).
private void buildPartial0(
    com.google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest result) {
  int from_bitField0_ = bitField0_;
  if (((from_bitField0_ & 0x00000001) != 0)) {
    result.name_ = name_;
  }
  if (((from_bitField0_ & 0x00000002) != 0)) {
    result.requestId_ = requestId_;
  }
  if (((from_bitField0_ & 0x00000004) != 0)) {
    result.force_ = force_;
  }
}
// The following overrides simply delegate to GeneratedMessageV3.Builder; they are
// emitted by the generator to pin the concrete Builder return type for chaining.
@java.lang.Override
public Builder clone() {
  return super.clone();
}

@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.setField(field, value);
}

@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}

@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}

@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}

@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.addRepeatedField(field, value);
}
// Dynamic dispatch: merge typed when possible, otherwise fall back to reflective merge.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (other instanceof com.google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest) {
    return mergeFrom(
        (com.google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest) other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}

// Typed merge: only non-default field values from 'other' overwrite this builder.
public Builder mergeFrom(
    com.google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest other) {
  if (other
      == com.google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest
          .getDefaultInstance()) return this;
  if (!other.getName().isEmpty()) {
    name_ = other.name_;
    bitField0_ |= 0x00000001;
    onChanged();
  }
  if (!other.getRequestId().isEmpty()) {
    requestId_ = other.requestId_;
    bitField0_ |= 0x00000002;
    onChanged();
  }
  if (other.getForce() != false) {
    setForce(other.getForce());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}

@java.lang.Override
public final boolean isInitialized() {
  // No required fields, so the builder is always initialized.
  return true;
}
// Wire-format parse loop.  Tag values encode (field_number << 3) | wire_type:
// 10 = field 1 (length-delimited string), 18 = field 2 (string), 24 = field 3 (varint bool).
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 10:
          {
            name_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000001;
            break;
          } // case 10
        case 18:
          {
            requestId_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000002;
            break;
          } // case 18
        case 24:
          {
            force_ = input.readBool();
            bitField0_ |= 0x00000004;
            break;
          } // case 24
        default:
          {
            // Unrecognized tags are preserved as unknown fields for round-tripping.
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    // Notify listeners even on error, since fields may have been partially merged.
    onChanged();
  } // finally
  return this;
}
// Tracks which fields have been explicitly set (bit 0x1 = name, 0x2 = requestId, 0x4 = force).
private int bitField0_;

// Holds either a String or a ByteString; lazily converted and cached on first access.
private java.lang.Object name_ = "";

/**
 *
 *
 * <pre>
 * Required. Name of the resource.
 * </pre>
 *
 * <code>
 * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The name.
 */
public java.lang.String getName() {
  java.lang.Object ref = name_;
  if (!(ref instanceof java.lang.String)) {
    // Decode the ByteString once and cache the String.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    name_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}

/**
 *
 *
 * <pre>
 * Required. Name of the resource.
 * </pre>
 *
 * <code>
 * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The bytes for name.
 */
public com.google.protobuf.ByteString getNameBytes() {
  java.lang.Object ref = name_;
  if (ref instanceof String) {
    // Encode the String once and cache the ByteString.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    name_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
 *
 *
 * <pre>
 * Required. Name of the resource.
 * </pre>
 *
 * <code>
 * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @param value The name to set.
 * @return This builder for chaining.
 */
public Builder setName(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  name_ = value;
  bitField0_ |= 0x00000001;  // Mark 'name' as explicitly set.
  onChanged();
  return this;
}

/**
 *
 *
 * <pre>
 * Required. Name of the resource.
 * </pre>
 *
 * <code>
 * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return This builder for chaining.
 */
public Builder clearName() {
  name_ = getDefaultInstance().getName();
  bitField0_ = (bitField0_ & ~0x00000001);  // Clear the 'name' has-been-set bit.
  onChanged();
  return this;
}

/**
 *
 *
 * <pre>
 * Required. Name of the resource.
 * </pre>
 *
 * <code>
 * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @param value The bytes for name to set.
 * @return This builder for chaining.
 */
public Builder setNameBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // Reject non-UTF-8 bytes up front; proto3 strings must be valid UTF-8.
  checkByteStringIsUtf8(value);
  name_ = value;
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}
// Holds either a String or a ByteString; lazily converted and cached on first access.
private java.lang.Object requestId_ = "";

/**
 *
 *
 * <pre>
 * Optional. An optional request ID to identify requests. Specify a unique
 * request ID so that if you must retry your request, the server knows to
 * ignore the request if it has already been completed. The server guarantees
 * this for at least 60 minutes after the first request.
 *
 * For example, consider a situation where you make an initial request and the
 * request times out. If you make the request again with the same request
 * ID, the server can check if original operation with the same request ID
 * was received, and if so, ignores the second request.
 *
 * The request ID must be a valid UUID with the exception that zero UUID is
 * not supported (00000000-0000-0000-0000-000000000000).
 * </pre>
 *
 * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The requestId.
 */
public java.lang.String getRequestId() {
  java.lang.Object ref = requestId_;
  if (!(ref instanceof java.lang.String)) {
    // Decode the ByteString once and cache the String.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    requestId_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}

/**
 *
 *
 * <pre>
 * Optional. An optional request ID to identify requests. Specify a unique
 * request ID so that if you must retry your request, the server knows to
 * ignore the request if it has already been completed. The server guarantees
 * this for at least 60 minutes after the first request.
 *
 * For example, consider a situation where you make an initial request and the
 * request times out. If you make the request again with the same request
 * ID, the server can check if original operation with the same request ID
 * was received, and if so, ignores the second request.
 *
 * The request ID must be a valid UUID with the exception that zero UUID is
 * not supported (00000000-0000-0000-0000-000000000000).
 * </pre>
 *
 * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The bytes for requestId.
 */
public com.google.protobuf.ByteString getRequestIdBytes() {
  java.lang.Object ref = requestId_;
  if (ref instanceof String) {
    // Encode the String once and cache the ByteString.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    requestId_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
*
*
* <pre>
* Optional. An optional request ID to identify requests. Specify a unique
* request ID so that if you must retry your request, the server knows to
* ignore the request if it has already been completed. The server guarantees
* this for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and the
* request times out. If you make the request again with the same request
* ID, the server can check if original operation with the same request ID
* was received, and if so, ignores the second request.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The requestId to set.
* @return This builder for chaining.
*/
// Sets request_id and marks field bit 0x2 so buildPartial0 copies it into the message.
public Builder setRequestId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
requestId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. An optional request ID to identify requests. Specify a unique
* request ID so that if you must retry your request, the server knows to
* ignore the request if it has already been completed. The server guarantees
* this for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and the
* request times out. If you make the request again with the same request
* ID, the server can check if original operation with the same request ID
* was received, and if so, ignores the second request.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
// Resets request_id to its default ("") and clears its has-bit.
public Builder clearRequestId() {
requestId_ = getDefaultInstance().getRequestId();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. An optional request ID to identify requests. Specify a unique
* request ID so that if you must retry your request, the server knows to
* ignore the request if it has already been completed. The server guarantees
* this for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and the
* request times out. If you make the request again with the same request
* ID, the server can check if original operation with the same request ID
* was received, and if so, ignores the second request.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for requestId to set.
* @return This builder for chaining.
*/
// Sets request_id from raw bytes; validates the bytes are well-formed UTF-8 first.
public Builder setRequestIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
requestId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private boolean force_;
/**
*
*
* <pre>
* Optional. If set to true, any child grant under this entitlement is also
* deleted. (Otherwise, the request only works if the entitlement has no child
* grant.)
* </pre>
*
* <code>bool force = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The force.
*/
// Plain accessor for the `force` flag.
@java.lang.Override
public boolean getForce() {
return force_;
}
/**
*
*
* <pre>
* Optional. If set to true, any child grant under this entitlement is also
* deleted. (Otherwise, the request only works if the entitlement has no child
* grant.)
* </pre>
*
* <code>bool force = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The force to set.
* @return This builder for chaining.
*/
// Sets the `force` flag and marks field bit 0x4.
public Builder setForce(boolean value) {
force_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. If set to true, any child grant under this entitlement is also
* deleted. (Otherwise, the request only works if the entitlement has no child
* grant.)
* </pre>
*
* <code>bool force = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
// Resets `force` to its proto3 default (false) and clears its has-bit.
public Builder clearForce() {
bitField0_ = (bitField0_ & ~0x00000004);
force_ = false;
onChanged();
return this;
}
// Final overrides that delegate unknown-field handling to the generated superclass.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest)
// Singleton default (all-fields-default) instance shared by every caller.
private static final com.google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest();
}
// Static accessor for the shared default instance.
public static com.google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Wire-format parser: merges the stream into a fresh Builder and, on any failure,
// attaches the partially parsed message to the thrown InvalidProtocolBufferException.
private static final com.google.protobuf.Parser<DeleteEntitlementRequest> PARSER =
new com.google.protobuf.AbstractParser<DeleteEntitlementRequest>() {
@java.lang.Override
public DeleteEntitlementRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
// Static and per-instance accessors for the shared parser and default instance.
public static com.google.protobuf.Parser<DeleteEntitlementRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<DeleteEntitlementRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.privilegedaccessmanager.v1.DeleteEntitlementRequest
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/configdelivery/v1/config_delivery.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.configdelivery.v1;
/**
*
*
* <pre>
* Message for deleting a Release
* </pre>
*
* Protobuf type {@code google.cloud.configdelivery.v1.DeleteReleaseRequest}
*/
public final class DeleteReleaseRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.configdelivery.v1.DeleteReleaseRequest)
DeleteReleaseRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use DeleteReleaseRequest.newBuilder() to construct.
// Private constructors: instances are created only via newBuilder()/the parser.
private DeleteReleaseRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// No-arg form initializes string fields to their proto3 default ("").
private DeleteReleaseRequest() {
name_ = "";
requestId_ = "";
}
// Runtime hook used by the protobuf library to create new instances reflectively.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new DeleteReleaseRequest();
}
// Descriptor plumbing linking this class to its entry in config_delivery.proto.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.configdelivery.v1.ConfigDeliveryProto
.internal_static_google_cloud_configdelivery_v1_DeleteReleaseRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.configdelivery.v1.ConfigDeliveryProto
.internal_static_google_cloud_configdelivery_v1_DeleteReleaseRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.configdelivery.v1.DeleteReleaseRequest.class,
com.google.cloud.configdelivery.v1.DeleteReleaseRequest.Builder.class);
}
// Field 1: `name`. Stored as Object so it can hold either String or ByteString.
public static final int NAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object name_ = "";
/**
*
*
* <pre>
* Required. Name of the resource
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The name.
*/
// Returns `name`, lazily decoding (and caching) a ByteString-backed value.
@java.lang.Override
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. Name of the resource
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for name.
*/
// Returns `name` as a ByteString, lazily encoding (and caching) a String-backed value.
@java.lang.Override
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Field 2: `request_id`. Same dual String/ByteString representation as name_.
public static final int REQUEST_ID_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object requestId_ = "";
/**
*
*
* <pre>
* Optional. An optional request ID to identify requests. Specify a unique
* request ID so that if you must retry your request, the server will know to
* ignore the request if it has already been completed. The server will
* guarantee that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and the
* request times out. If you make the request again with the same request
* ID, the server can check if original operation with the same request ID
* was received, and if so, will ignore the second request. This prevents
* clients from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>
* string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
* </code>
*
* @return The requestId.
*/
// Returns `request_id`, lazily decoding (and caching) a ByteString-backed value.
@java.lang.Override
public java.lang.String getRequestId() {
java.lang.Object ref = requestId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
requestId_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. An optional request ID to identify requests. Specify a unique
* request ID so that if you must retry your request, the server will know to
* ignore the request if it has already been completed. The server will
* guarantee that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and the
* request times out. If you make the request again with the same request
* ID, the server can check if original operation with the same request ID
* was received, and if so, will ignore the second request. This prevents
* clients from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>
* string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
* </code>
*
* @return The bytes for requestId.
*/
// Returns `request_id` as a ByteString, lazily encoding (and caching) a String value.
@java.lang.Override
public com.google.protobuf.ByteString getRequestIdBytes() {
java.lang.Object ref = requestId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
requestId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Field 3: `force`, proto3 default false.
public static final int FORCE_FIELD_NUMBER = 3;
private boolean force_ = false;
/**
*
*
* <pre>
* Optional. If set to true, any variants of this release will also be
* deleted. (Otherwise, the request will only work if the release has no
* variants.)
* </pre>
*
* <code>bool force = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The force.
*/
// Plain accessor for the `force` flag.
@java.lang.Override
public boolean getForce() {
return force_;
}
// Memoized initialization check: -1 = unknown, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// No required fields in this message, so it is always initialized.
memoizedIsInitialized = 1;
return true;
}
// Serializes to the wire format; default-valued fields are skipped per proto3 rules.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, requestId_);
}
if (force_ != false) {
output.writeBool(3, force_);
}
getUnknownFields().writeTo(output);
}
// Computes (and memoizes in memoizedSize) the serialized byte size, mirroring writeTo.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, requestId_);
}
if (force_ != false) {
size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, force_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
// Field-by-field value equality, including unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.configdelivery.v1.DeleteReleaseRequest)) {
return super.equals(obj);
}
com.google.cloud.configdelivery.v1.DeleteReleaseRequest other =
(com.google.cloud.configdelivery.v1.DeleteReleaseRequest) obj;
if (!getName().equals(other.getName())) return false;
if (!getRequestId().equals(other.getRequestId())) return false;
if (getForce() != other.getForce()) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
// Memoized hash over descriptor, each field (keyed by field number), and unknown fields;
// consistent with equals above.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + NAME_FIELD_NUMBER;
hash = (53 * hash) + getName().hashCode();
hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER;
hash = (53 * hash) + getRequestId().hashCode();
hash = (37 * hash) + FORCE_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getForce());
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parse entry points for every supported input source
// (ByteBuffer, ByteString, byte[], InputStream, delimited stream, CodedInputStream),
// each with and without an ExtensionRegistryLite; all delegate to PARSER.
public static com.google.cloud.configdelivery.v1.DeleteReleaseRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.configdelivery.v1.DeleteReleaseRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.configdelivery.v1.DeleteReleaseRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.configdelivery.v1.DeleteReleaseRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.configdelivery.v1.DeleteReleaseRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.configdelivery.v1.DeleteReleaseRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.configdelivery.v1.DeleteReleaseRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.configdelivery.v1.DeleteReleaseRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.configdelivery.v1.DeleteReleaseRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.configdelivery.v1.DeleteReleaseRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.configdelivery.v1.DeleteReleaseRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.configdelivery.v1.DeleteReleaseRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Builder factory methods: fresh builders start from the default instance's state.
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.configdelivery.v1.DeleteReleaseRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// The default instance yields an empty builder; anything else is copied in.
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Message for deleting a Release
* </pre>
*
* Protobuf type {@code google.cloud.configdelivery.v1.DeleteReleaseRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.configdelivery.v1.DeleteReleaseRequest)
com.google.cloud.configdelivery.v1.DeleteReleaseRequestOrBuilder {
// Builder-side descriptor plumbing; mirrors the message-level accessors.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.configdelivery.v1.ConfigDeliveryProto
.internal_static_google_cloud_configdelivery_v1_DeleteReleaseRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.configdelivery.v1.ConfigDeliveryProto
.internal_static_google_cloud_configdelivery_v1_DeleteReleaseRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.configdelivery.v1.DeleteReleaseRequest.class,
com.google.cloud.configdelivery.v1.DeleteReleaseRequest.Builder.class);
}
// Construct using com.google.cloud.configdelivery.v1.DeleteReleaseRequest.newBuilder()
private Builder() {}
// Parent-aware constructor used when this builder is nested inside another builder.
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
// Resets every field to its default and clears all has-bits.
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
name_ = "";
requestId_ = "";
force_ = false;
return this;
}
// Type identity accessors required by the Message.Builder contract.
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.configdelivery.v1.ConfigDeliveryProto
.internal_static_google_cloud_configdelivery_v1_DeleteReleaseRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.configdelivery.v1.DeleteReleaseRequest getDefaultInstanceForType() {
return com.google.cloud.configdelivery.v1.DeleteReleaseRequest.getDefaultInstance();
}
// build() enforces initialization; buildPartial() copies only fields whose bits are set.
@java.lang.Override
public com.google.cloud.configdelivery.v1.DeleteReleaseRequest build() {
com.google.cloud.configdelivery.v1.DeleteReleaseRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.configdelivery.v1.DeleteReleaseRequest buildPartial() {
com.google.cloud.configdelivery.v1.DeleteReleaseRequest result =
new com.google.cloud.configdelivery.v1.DeleteReleaseRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Copies each explicitly-set field (per bitField0_) from the builder into the message.
private void buildPartial0(com.google.cloud.configdelivery.v1.DeleteReleaseRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.name_ = name_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.requestId_ = requestId_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.force_ = force_;
}
}
// Reflection-based field mutation overrides; all delegate to the generated superclass.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Message-merge overloads: the typed variant copies only non-default fields from `other`.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.configdelivery.v1.DeleteReleaseRequest) {
return mergeFrom((com.google.cloud.configdelivery.v1.DeleteReleaseRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.configdelivery.v1.DeleteReleaseRequest other) {
if (other == com.google.cloud.configdelivery.v1.DeleteReleaseRequest.getDefaultInstance())
return this;
if (!other.getName().isEmpty()) {
name_ = other.name_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getRequestId().isEmpty()) {
requestId_ = other.requestId_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.getForce() != false) {
setForce(other.getForce());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
// Always true: the message declares no required fields.
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Wire-format merge: dispatches on each tag (10 = field 1 string, 18 = field 2 string,
// 24 = field 3 bool); unknown tags go to parseUnknownField, tag 0 / end-group stops.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
name_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
requestId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 24:
{
force_ = input.readBool();
bitField0_ |= 0x00000004;
break;
} // case 24
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
// Notify parent builders even if parsing failed partway through.
onChanged();
} // finally
return this;
}
// bitField0_ tracks which fields were explicitly set (0x1 name, 0x2 request_id, 0x4 force).
private int bitField0_;
private java.lang.Object name_ = "";
/**
*
*
* <pre>
* Required. Name of the resource
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The name.
*/
// Returns `name`, lazily decoding (and caching) a ByteString-backed value.
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Name of the resource
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for name.
*/
// Returns `name` as a ByteString, lazily encoding (and caching) a String value.
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Name of the resource
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The name to set.
* @return This builder for chaining.
*/
// Sets `name` and marks field bit 0x1.
public Builder setName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Name of the resource
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
// Resets `name` to its default ("") and clears its has-bit.
public Builder clearName() {
name_ = getDefaultInstance().getName();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Name of the resource
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for name to set.
* @return This builder for chaining.
*/
// Sets `name` from raw bytes; validates the bytes are well-formed UTF-8 first.
public Builder setNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object requestId_ = "";
/**
*
*
* <pre>
* Optional. An optional request ID to identify requests. Specify a unique
* request ID so that if you must retry your request, the server will know to
* ignore the request if it has already been completed. The server will
* guarantee that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and the
* request times out. If you make the request again with the same request
* ID, the server can check if original operation with the same request ID
* was received, and if so, will ignore the second request. This prevents
* clients from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>
* string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
* </code>
*
* @return The requestId.
*/
// Returns `request_id`, lazily decoding (and caching) a ByteString-backed value.
public java.lang.String getRequestId() {
java.lang.Object ref = requestId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
requestId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. An optional request ID to identify requests. Specify a unique
* request ID so that if you must retry your request, the server will know to
* ignore the request if it has already been completed. The server will
* guarantee that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and the
* request times out. If you make the request again with the same request
* ID, the server can check if original operation with the same request ID
* was received, and if so, will ignore the second request. This prevents
* clients from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>
* string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
* </code>
*
* @return The bytes for requestId.
*/
// Returns `request_id` as a ByteString, lazily encoding (and caching) a String value.
public com.google.protobuf.ByteString getRequestIdBytes() {
java.lang.Object ref = requestId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
requestId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. An optional request ID to identify requests. Specify a unique
* request ID so that if you must retry your request, the server will know to
* ignore the request if it has already been completed. The server will
* guarantee that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and the
* request times out. If you make the request again with the same request
* ID, the server can check if original operation with the same request ID
* was received, and if so, will ignore the second request. This prevents
* clients from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>
* string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
* </code>
*
* @param value The requestId to set.
* @return This builder for chaining.
*/
// Sets `request_id` and marks field bit 0x2.
public Builder setRequestId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
requestId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. An optional request ID to identify requests. Specify a unique
* request ID so that if you must retry your request, the server will know to
* ignore the request if it has already been completed. The server will
* guarantee that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and the
* request times out. If you make the request again with the same request
* ID, the server can check if original operation with the same request ID
* was received, and if so, will ignore the second request. This prevents
* clients from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>
* string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
* </code>
*
* @return This builder for chaining.
*/
// Resets `request_id` to its default ("") and clears its has-bit.
public Builder clearRequestId() {
requestId_ = getDefaultInstance().getRequestId();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. An optional request ID to identify requests. Specify a unique
* request ID so that if you must retry your request, the server will know to
* ignore the request if it has already been completed. The server will
* guarantee that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and the
* request times out. If you make the request again with the same request
* ID, the server can check if original operation with the same request ID
* was received, and if so, will ignore the second request. This prevents
* clients from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>
* string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
* </code>
*
* @param value The bytes for requestId to set.
* @return This builder for chaining.
*/
    // Sets request_id from raw bytes. Rejects null explicitly and validates the
    // bytes are well-formed UTF-8 (proto3 string fields must hold valid UTF-8)
    // before storing and marking the presence bit (0x2).
    public Builder setRequestIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      requestId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
private boolean force_;
/**
*
*
* <pre>
* Optional. If set to true, any variants of this release will also be
* deleted. (Otherwise, the request will only work if the release has no
* variants.)
* </pre>
*
* <code>bool force = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The force.
*/
@java.lang.Override
public boolean getForce() {
return force_;
}
/**
*
*
* <pre>
* Optional. If set to true, any variants of this release will also be
* deleted. (Otherwise, the request will only work if the release has no
* variants.)
* </pre>
*
* <code>bool force = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The force to set.
* @return This builder for chaining.
*/
public Builder setForce(boolean value) {
force_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. If set to true, any variants of this release will also be
* deleted. (Otherwise, the request will only work if the release has no
* variants.)
* </pre>
*
* <code>bool force = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearForce() {
bitField0_ = (bitField0_ & ~0x00000004);
force_ = false;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.configdelivery.v1.DeleteReleaseRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.configdelivery.v1.DeleteReleaseRequest)
private static final com.google.cloud.configdelivery.v1.DeleteReleaseRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.configdelivery.v1.DeleteReleaseRequest();
}
public static com.google.cloud.configdelivery.v1.DeleteReleaseRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
  // Singleton wire-format parser. Parsing is delegated to Builder.mergeFrom;
  // on any failure the partially-built message is attached to the thrown
  // InvalidProtocolBufferException so callers can inspect what was read so far.
  private static final com.google.protobuf.Parser<DeleteReleaseRequest> PARSER =
      new com.google.protobuf.AbstractParser<DeleteReleaseRequest>() {
        @java.lang.Override
        public DeleteReleaseRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O errors so the parser contract (only
            // InvalidProtocolBufferException) is preserved.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
public static com.google.protobuf.Parser<DeleteReleaseRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<DeleteReleaseRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.configdelivery.v1.DeleteReleaseRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 34,972 | java-dialogflow/proto-google-cloud-dialogflow-v2/src/main/java/com/google/cloud/dialogflow/v2/UpdateSessionEntityTypeRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/v2/session_entity_type.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.v2;
/**
*
*
* <pre>
* The request message for
* [SessionEntityTypes.UpdateSessionEntityType][google.cloud.dialogflow.v2.SessionEntityTypes.UpdateSessionEntityType].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest}
*/
public final class UpdateSessionEntityTypeRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest)
UpdateSessionEntityTypeRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateSessionEntityTypeRequest.newBuilder() to construct.
private UpdateSessionEntityTypeRequest(
com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateSessionEntityTypeRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateSessionEntityTypeRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.v2.SessionEntityTypeProto
.internal_static_google_cloud_dialogflow_v2_UpdateSessionEntityTypeRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.v2.SessionEntityTypeProto
.internal_static_google_cloud_dialogflow_v2_UpdateSessionEntityTypeRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest.class,
com.google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest.Builder.class);
}
private int bitField0_;
public static final int SESSION_ENTITY_TYPE_FIELD_NUMBER = 1;
private com.google.cloud.dialogflow.v2.SessionEntityType sessionEntityType_;
/**
*
*
* <pre>
* Required. The session entity type to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.v2.SessionEntityType session_entity_type = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the sessionEntityType field is set.
*/
@java.lang.Override
public boolean hasSessionEntityType() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The session entity type to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.v2.SessionEntityType session_entity_type = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The sessionEntityType.
*/
@java.lang.Override
public com.google.cloud.dialogflow.v2.SessionEntityType getSessionEntityType() {
return sessionEntityType_ == null
? com.google.cloud.dialogflow.v2.SessionEntityType.getDefaultInstance()
: sessionEntityType_;
}
/**
*
*
* <pre>
* Required. The session entity type to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.v2.SessionEntityType session_entity_type = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.dialogflow.v2.SessionEntityTypeOrBuilder getSessionEntityTypeOrBuilder() {
return sessionEntityType_ == null
? com.google.cloud.dialogflow.v2.SessionEntityType.getDefaultInstance()
: sessionEntityType_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Optional. The mask to control which fields get updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Optional. The mask to control which fields get updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Optional. The mask to control which fields get updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
private byte memoizedIsInitialized = -1;
  // Memoized required-field check: memoizedIsInitialized is -1 (unknown),
  // 0 (false) or 1 (true). This message declares no proto2 required fields,
  // so the first call caches 1 and all calls return true.
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes this message to the wire: each field is written only when its
  // presence bit is set (0x1 = session_entity_type, 0x2 = update_mask),
  // followed by any unknown fields retained from parsing for round-tripping.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getSessionEntityType());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getUpdateMask());
    }
    getUnknownFields().writeTo(output);
  }
  // Computes (and caches in memoizedSize) the exact encoded byte size of this
  // message, mirroring writeTo(): only fields with their presence bit set
  // contribute, plus the retained unknown fields. Safe to cache because the
  // message is immutable once built.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getSessionEntityType());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality: two requests are equal when each field has the same
  // presence and (if present) equal value, and their unknown field sets match.
  // Non-message types fall back to super.equals (identity for messages).
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest other =
        (com.google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest) obj;
    if (hasSessionEntityType() != other.hasSessionEntityType()) return false;
    if (hasSessionEntityType()) {
      if (!getSessionEntityType().equals(other.getSessionEntityType())) return false;
    }
    if (hasUpdateMask() != other.hasUpdateMask()) return false;
    if (hasUpdateMask()) {
      if (!getUpdateMask().equals(other.getUpdateMask())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash consistent with equals(): mixes the descriptor, each present field's
  // number and value hash, and the unknown fields. The result is memoized,
  // which is safe because built messages are immutable. (Note: a computed
  // hash of 0 would be recomputed each call; harmless, just not cached.)
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasSessionEntityType()) {
      hash = (37 * hash) + SESSION_ENTITY_TYPE_FIELD_NUMBER;
      hash = (53 * hash) + getSessionEntityType().hashCode();
    }
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The request message for
* [SessionEntityTypes.UpdateSessionEntityType][google.cloud.dialogflow.v2.SessionEntityTypes.UpdateSessionEntityType].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest)
com.google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.v2.SessionEntityTypeProto
.internal_static_google_cloud_dialogflow_v2_UpdateSessionEntityTypeRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.v2.SessionEntityTypeProto
.internal_static_google_cloud_dialogflow_v2_UpdateSessionEntityTypeRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest.class,
com.google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest.Builder.class);
}
// Construct using com.google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getSessionEntityTypeFieldBuilder();
getUpdateMaskFieldBuilder();
}
}
    // Resets the builder to a pristine state: zeroes all presence bits, drops
    // both message fields, and disposes any lazily-created nested field
    // builders so they detach from this parent.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      sessionEntityType_ = null;
      if (sessionEntityTypeBuilder_ != null) {
        sessionEntityTypeBuilder_.dispose();
        sessionEntityTypeBuilder_ = null;
      }
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      return this;
    }
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.dialogflow.v2.SessionEntityTypeProto
.internal_static_google_cloud_dialogflow_v2_UpdateSessionEntityTypeRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest
getDefaultInstanceForType() {
return com.google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest build() {
com.google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
    // Builds the message without checking required-field initialization.
    // Field copying is skipped entirely when no presence bit is set, then
    // onBuilt() marks the builder clean for copy-on-write semantics.
    @java.lang.Override
    public com.google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest buildPartial() {
      com.google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest result =
          new com.google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies each set field from builder to message. A field's value comes
    // from the nested builder when one was materialized, otherwise from the
    // plain field; the corresponding presence bit is propagated to the
    // message's own bitField0_.
    private void buildPartial0(
        com.google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.sessionEntityType_ =
            sessionEntityTypeBuilder_ == null
                ? sessionEntityType_
                : sessionEntityTypeBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest) {
return mergeFrom((com.google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
    // Message-level merge: no-op for the default instance; otherwise each
    // field present in `other` is merged into this builder (nested messages
    // merge field-by-field rather than replace), and unknown fields are
    // carried over.
    public Builder mergeFrom(com.google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest other) {
      if (other
          == com.google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest.getDefaultInstance())
        return this;
      if (other.hasSessionEntityType()) {
        mergeSessionEntityType(other.getSessionEntityType());
      }
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    // Wire-format merge loop: reads tag/value pairs until tag 0 (end of
    // stream) or an end-group tag. Tag 10 is session_entity_type (field 1),
    // tag 18 is update_mask (field 2); anything else is preserved as an
    // unknown field. onChanged() runs in finally so listeners are notified
    // even on a parse failure.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(
                    getSessionEntityTypeFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private com.google.cloud.dialogflow.v2.SessionEntityType sessionEntityType_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dialogflow.v2.SessionEntityType,
com.google.cloud.dialogflow.v2.SessionEntityType.Builder,
com.google.cloud.dialogflow.v2.SessionEntityTypeOrBuilder>
sessionEntityTypeBuilder_;
/**
*
*
* <pre>
* Required. The session entity type to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.v2.SessionEntityType session_entity_type = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the sessionEntityType field is set.
*/
public boolean hasSessionEntityType() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The session entity type to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.v2.SessionEntityType session_entity_type = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The sessionEntityType.
*/
public com.google.cloud.dialogflow.v2.SessionEntityType getSessionEntityType() {
if (sessionEntityTypeBuilder_ == null) {
return sessionEntityType_ == null
? com.google.cloud.dialogflow.v2.SessionEntityType.getDefaultInstance()
: sessionEntityType_;
} else {
return sessionEntityTypeBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The session entity type to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.v2.SessionEntityType session_entity_type = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setSessionEntityType(com.google.cloud.dialogflow.v2.SessionEntityType value) {
if (sessionEntityTypeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
sessionEntityType_ = value;
} else {
sessionEntityTypeBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The session entity type to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.v2.SessionEntityType session_entity_type = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setSessionEntityType(
com.google.cloud.dialogflow.v2.SessionEntityType.Builder builderForValue) {
if (sessionEntityTypeBuilder_ == null) {
sessionEntityType_ = builderForValue.build();
} else {
sessionEntityTypeBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The session entity type to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.v2.SessionEntityType session_entity_type = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeSessionEntityType(com.google.cloud.dialogflow.v2.SessionEntityType value) {
if (sessionEntityTypeBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& sessionEntityType_ != null
&& sessionEntityType_
!= com.google.cloud.dialogflow.v2.SessionEntityType.getDefaultInstance()) {
getSessionEntityTypeBuilder().mergeFrom(value);
} else {
sessionEntityType_ = value;
}
} else {
sessionEntityTypeBuilder_.mergeFrom(value);
}
if (sessionEntityType_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The session entity type to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.v2.SessionEntityType session_entity_type = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearSessionEntityType() {
bitField0_ = (bitField0_ & ~0x00000001);
sessionEntityType_ = null;
if (sessionEntityTypeBuilder_ != null) {
sessionEntityTypeBuilder_.dispose();
sessionEntityTypeBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The session entity type to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.v2.SessionEntityType session_entity_type = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.dialogflow.v2.SessionEntityType.Builder getSessionEntityTypeBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getSessionEntityTypeFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The session entity type to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.v2.SessionEntityType session_entity_type = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.dialogflow.v2.SessionEntityTypeOrBuilder
getSessionEntityTypeOrBuilder() {
if (sessionEntityTypeBuilder_ != null) {
return sessionEntityTypeBuilder_.getMessageOrBuilder();
} else {
return sessionEntityType_ == null
? com.google.cloud.dialogflow.v2.SessionEntityType.getDefaultInstance()
: sessionEntityType_;
}
}
/**
*
*
* <pre>
* Required. The session entity type to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.v2.SessionEntityType session_entity_type = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dialogflow.v2.SessionEntityType,
com.google.cloud.dialogflow.v2.SessionEntityType.Builder,
com.google.cloud.dialogflow.v2.SessionEntityTypeOrBuilder>
getSessionEntityTypeFieldBuilder() {
if (sessionEntityTypeBuilder_ == null) {
sessionEntityTypeBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dialogflow.v2.SessionEntityType,
com.google.cloud.dialogflow.v2.SessionEntityType.Builder,
com.google.cloud.dialogflow.v2.SessionEntityTypeOrBuilder>(
getSessionEntityType(), getParentForChildren(), isClean());
sessionEntityType_ = null;
}
return sessionEntityTypeBuilder_;
}
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* Optional. The mask to control which fields get updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Optional. The mask to control which fields get updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Optional. The mask to control which fields get updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The mask to control which fields get updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The mask to control which fields get updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& updateMask_ != null
&& updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
getUpdateMaskBuilder().mergeFrom(value);
} else {
updateMask_ = value;
}
} else {
updateMaskBuilder_.mergeFrom(value);
}
if (updateMask_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Optional. The mask to control which fields get updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000002);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The mask to control which fields get updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Optional. The mask to control which fields get updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* Optional. The mask to control which fields get updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest)
// Singleton default (all-fields-unset) instance, created eagerly in a static
// initializer and shared by every getDefaultInstance()/getDefaultInstanceForType() call.
private static final com.google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest();
}
public static com.google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Wire-format parser. Parsing is funneled through a Builder; every failure path
// attaches the partially-built message via setUnfinishedMessage() so callers can
// recover whatever was successfully read before the error.
private static final com.google.protobuf.Parser<UpdateSessionEntityTypeRequest> PARSER =
new com.google.protobuf.AbstractParser<UpdateSessionEntityTypeRequest>() {
@java.lang.Override
public UpdateSessionEntityTypeRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
// Plain I/O failures are wrapped so the declared exception type is uniform.
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
// Static accessor for the shared parser instance.
public static com.google.protobuf.Parser<UpdateSessionEntityTypeRequest> parser() {
return PARSER;
}
// Instance-level accessor for the same shared parser.
@java.lang.Override
public com.google.protobuf.Parser<UpdateSessionEntityTypeRequest> getParserForType() {
return PARSER;
}
// Instance-level accessor for the shared default instance.
@java.lang.Override
public com.google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// ==== googleapis/google-cloud-java : java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/BatchMigrateResourcesResponse.java ====
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/migration_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1;
/**
*
*
* <pre>
* Response message for
* [MigrationService.BatchMigrateResources][google.cloud.aiplatform.v1.MigrationService.BatchMigrateResources].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.BatchMigrateResourcesResponse}
*/
// Generated protobuf message wrapping a single repeated field
// (migrate_resource_responses, field number 1). NOTE(review): this file is
// protoc output ("DO NOT EDIT") — regenerate from migration_service.proto
// rather than hand-editing.
public final class BatchMigrateResourcesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.BatchMigrateResourcesResponse)
BatchMigrateResourcesResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use BatchMigrateResourcesResponse.newBuilder() to construct.
private BatchMigrateResourcesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private BatchMigrateResourcesResponse() {
migrateResourceResponses_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new BatchMigrateResourcesResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.MigrationServiceProto
.internal_static_google_cloud_aiplatform_v1_BatchMigrateResourcesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.MigrationServiceProto
.internal_static_google_cloud_aiplatform_v1_BatchMigrateResourcesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.BatchMigrateResourcesResponse.class,
com.google.cloud.aiplatform.v1.BatchMigrateResourcesResponse.Builder.class);
}
public static final int MIGRATE_RESOURCE_RESPONSES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
// Immutable once the message is built; the Builder hands over an
// unmodifiable list in buildPartialRepeatedFields().
private java.util.List<com.google.cloud.aiplatform.v1.MigrateResourceResponse>
migrateResourceResponses_;
/**
*
*
* <pre>
* Successfully migrated resources.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.MigrateResourceResponse migrate_resource_responses = 1;
* </code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.aiplatform.v1.MigrateResourceResponse>
getMigrateResourceResponsesList() {
return migrateResourceResponses_;
}
/**
*
*
* <pre>
* Successfully migrated resources.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.MigrateResourceResponse migrate_resource_responses = 1;
* </code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.aiplatform.v1.MigrateResourceResponseOrBuilder>
getMigrateResourceResponsesOrBuilderList() {
return migrateResourceResponses_;
}
/**
*
*
* <pre>
* Successfully migrated resources.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.MigrateResourceResponse migrate_resource_responses = 1;
* </code>
*/
@java.lang.Override
public int getMigrateResourceResponsesCount() {
return migrateResourceResponses_.size();
}
/**
*
*
* <pre>
* Successfully migrated resources.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.MigrateResourceResponse migrate_resource_responses = 1;
* </code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1.MigrateResourceResponse getMigrateResourceResponses(
int index) {
return migrateResourceResponses_.get(index);
}
/**
*
*
* <pre>
* Successfully migrated resources.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.MigrateResourceResponse migrate_resource_responses = 1;
* </code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1.MigrateResourceResponseOrBuilder
getMigrateResourceResponsesOrBuilder(int index) {
return migrateResourceResponses_.get(index);
}
// Cached isInitialized() result: -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// No required fields in this message, so it is always initialized.
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < migrateResourceResponses_.size(); i++) {
output.writeMessage(1, migrateResourceResponses_.get(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
// memoizedSize of -1 means "not computed yet"; the result is cached because
// serialized size is queried repeatedly during serialization.
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < migrateResourceResponses_.size(); i++) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(
1, migrateResourceResponses_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.aiplatform.v1.BatchMigrateResourcesResponse)) {
return super.equals(obj);
}
com.google.cloud.aiplatform.v1.BatchMigrateResourcesResponse other =
(com.google.cloud.aiplatform.v1.BatchMigrateResourcesResponse) obj;
if (!getMigrateResourceResponsesList().equals(other.getMigrateResourceResponsesList()))
return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
// memoizedHashCode of 0 means "not computed yet" (protobuf reserves 0).
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getMigrateResourceResponsesCount() > 0) {
hash = (37 * hash) + MIGRATE_RESOURCE_RESPONSES_FIELD_NUMBER;
hash = (53 * hash) + getMigrateResourceResponsesList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parse entry points for every supported input form (ByteBuffer,
// ByteString, byte[], InputStream, CodedInputStream), with and without an
// extension registry. All delegate to PARSER or the GeneratedMessageV3 helpers.
public static com.google.cloud.aiplatform.v1.BatchMigrateResourcesResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.BatchMigrateResourcesResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.BatchMigrateResourcesResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.BatchMigrateResourcesResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.BatchMigrateResourcesResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.BatchMigrateResourcesResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.BatchMigrateResourcesResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.BatchMigrateResourcesResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.BatchMigrateResourcesResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.BatchMigrateResourcesResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.BatchMigrateResourcesResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.BatchMigrateResourcesResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.aiplatform.v1.BatchMigrateResourcesResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// The default instance produces a fresh Builder; any other instance seeds
// the Builder with its current field values.
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for
* [MigrationService.BatchMigrateResources][google.cloud.aiplatform.v1.MigrationService.BatchMigrateResources].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.BatchMigrateResourcesResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.BatchMigrateResourcesResponse)
com.google.cloud.aiplatform.v1.BatchMigrateResourcesResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.MigrationServiceProto
.internal_static_google_cloud_aiplatform_v1_BatchMigrateResourcesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.MigrationServiceProto
.internal_static_google_cloud_aiplatform_v1_BatchMigrateResourcesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.BatchMigrateResourcesResponse.class,
com.google.cloud.aiplatform.v1.BatchMigrateResourcesResponse.Builder.class);
}
// Construct using com.google.cloud.aiplatform.v1.BatchMigrateResourcesResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (migrateResourceResponsesBuilder_ == null) {
migrateResourceResponses_ = java.util.Collections.emptyList();
} else {
migrateResourceResponses_ = null;
migrateResourceResponsesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.aiplatform.v1.MigrationServiceProto
.internal_static_google_cloud_aiplatform_v1_BatchMigrateResourcesResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.BatchMigrateResourcesResponse
getDefaultInstanceForType() {
return com.google.cloud.aiplatform.v1.BatchMigrateResourcesResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.BatchMigrateResourcesResponse build() {
com.google.cloud.aiplatform.v1.BatchMigrateResourcesResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.BatchMigrateResourcesResponse buildPartial() {
com.google.cloud.aiplatform.v1.BatchMigrateResourcesResponse result =
new com.google.cloud.aiplatform.v1.BatchMigrateResourcesResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.aiplatform.v1.BatchMigrateResourcesResponse result) {
if (migrateResourceResponsesBuilder_ == null) {
// Bit 0x1 set means this Builder privately owns a mutable list; freeze it
// before handing it to the immutable message, then drop the ownership bit.
if (((bitField0_ & 0x00000001) != 0)) {
migrateResourceResponses_ =
java.util.Collections.unmodifiableList(migrateResourceResponses_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.migrateResourceResponses_ = migrateResourceResponses_;
} else {
result.migrateResourceResponses_ = migrateResourceResponsesBuilder_.build();
}
}
private void buildPartial0(
com.google.cloud.aiplatform.v1.BatchMigrateResourcesResponse result) {
// No non-repeated fields in this message; kept for generator symmetry.
int from_bitField0_ = bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.aiplatform.v1.BatchMigrateResourcesResponse) {
return mergeFrom((com.google.cloud.aiplatform.v1.BatchMigrateResourcesResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.aiplatform.v1.BatchMigrateResourcesResponse other) {
if (other
== com.google.cloud.aiplatform.v1.BatchMigrateResourcesResponse.getDefaultInstance())
return this;
if (migrateResourceResponsesBuilder_ == null) {
if (!other.migrateResourceResponses_.isEmpty()) {
if (migrateResourceResponses_.isEmpty()) {
// Our list is empty: share the other message's immutable list directly
// (copy-on-write; a later mutation will copy it via ensure...IsMutable()).
migrateResourceResponses_ = other.migrateResourceResponses_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureMigrateResourceResponsesIsMutable();
migrateResourceResponses_.addAll(other.migrateResourceResponses_);
}
onChanged();
}
} else {
if (!other.migrateResourceResponses_.isEmpty()) {
if (migrateResourceResponsesBuilder_.isEmpty()) {
// Field builder holds nothing: dispose it and fall back to sharing the
// other message's list, re-creating the builder only if the runtime
// forces field builders (alwaysUseFieldBuilders).
migrateResourceResponsesBuilder_.dispose();
migrateResourceResponsesBuilder_ = null;
migrateResourceResponses_ = other.migrateResourceResponses_;
bitField0_ = (bitField0_ & ~0x00000001);
migrateResourceResponsesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getMigrateResourceResponsesFieldBuilder()
: null;
} else {
migrateResourceResponsesBuilder_.addAllMessages(other.migrateResourceResponses_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
// Tag 10 = field 1, wire type 2 (length-delimited message).
case 10:
{
com.google.cloud.aiplatform.v1.MigrateResourceResponse m =
input.readMessage(
com.google.cloud.aiplatform.v1.MigrateResourceResponse.parser(),
extensionRegistry);
if (migrateResourceResponsesBuilder_ == null) {
ensureMigrateResourceResponsesIsMutable();
migrateResourceResponses_.add(m);
} else {
migrateResourceResponsesBuilder_.addMessage(m);
}
break;
} // case 10
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
// Bit 0x1 = the Builder owns migrateResourceResponses_ as a private mutable list.
private int bitField0_;
private java.util.List<com.google.cloud.aiplatform.v1.MigrateResourceResponse>
migrateResourceResponses_ = java.util.Collections.emptyList();
// Copies the (possibly shared/immutable) list into a private ArrayList before
// any in-place mutation, and records ownership in bitField0_.
private void ensureMigrateResourceResponsesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
migrateResourceResponses_ =
new java.util.ArrayList<com.google.cloud.aiplatform.v1.MigrateResourceResponse>(
migrateResourceResponses_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.aiplatform.v1.MigrateResourceResponse,
com.google.cloud.aiplatform.v1.MigrateResourceResponse.Builder,
com.google.cloud.aiplatform.v1.MigrateResourceResponseOrBuilder>
migrateResourceResponsesBuilder_;
/**
*
*
* <pre>
* Successfully migrated resources.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.MigrateResourceResponse migrate_resource_responses = 1;
* </code>
*/
public java.util.List<com.google.cloud.aiplatform.v1.MigrateResourceResponse>
getMigrateResourceResponsesList() {
if (migrateResourceResponsesBuilder_ == null) {
return java.util.Collections.unmodifiableList(migrateResourceResponses_);
} else {
return migrateResourceResponsesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* Successfully migrated resources.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.MigrateResourceResponse migrate_resource_responses = 1;
* </code>
*/
public int getMigrateResourceResponsesCount() {
if (migrateResourceResponsesBuilder_ == null) {
return migrateResourceResponses_.size();
} else {
return migrateResourceResponsesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* Successfully migrated resources.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.MigrateResourceResponse migrate_resource_responses = 1;
* </code>
*/
public com.google.cloud.aiplatform.v1.MigrateResourceResponse getMigrateResourceResponses(
int index) {
if (migrateResourceResponsesBuilder_ == null) {
return migrateResourceResponses_.get(index);
} else {
return migrateResourceResponsesBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* Successfully migrated resources.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.MigrateResourceResponse migrate_resource_responses = 1;
* </code>
*/
public Builder setMigrateResourceResponses(
int index, com.google.cloud.aiplatform.v1.MigrateResourceResponse value) {
if (migrateResourceResponsesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureMigrateResourceResponsesIsMutable();
migrateResourceResponses_.set(index, value);
onChanged();
} else {
migrateResourceResponsesBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Successfully migrated resources.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.MigrateResourceResponse migrate_resource_responses = 1;
* </code>
*/
public Builder setMigrateResourceResponses(
int index, com.google.cloud.aiplatform.v1.MigrateResourceResponse.Builder builderForValue) {
if (migrateResourceResponsesBuilder_ == null) {
ensureMigrateResourceResponsesIsMutable();
migrateResourceResponses_.set(index, builderForValue.build());
onChanged();
} else {
migrateResourceResponsesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Successfully migrated resources.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.MigrateResourceResponse migrate_resource_responses = 1;
* </code>
*/
public Builder addMigrateResourceResponses(
com.google.cloud.aiplatform.v1.MigrateResourceResponse value) {
if (migrateResourceResponsesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureMigrateResourceResponsesIsMutable();
migrateResourceResponses_.add(value);
onChanged();
} else {
migrateResourceResponsesBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* Successfully migrated resources.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.MigrateResourceResponse migrate_resource_responses = 1;
* </code>
*/
public Builder addMigrateResourceResponses(
int index, com.google.cloud.aiplatform.v1.MigrateResourceResponse value) {
if (migrateResourceResponsesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureMigrateResourceResponsesIsMutable();
migrateResourceResponses_.add(index, value);
onChanged();
} else {
migrateResourceResponsesBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Successfully migrated resources.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.MigrateResourceResponse migrate_resource_responses = 1;
* </code>
*/
public Builder addMigrateResourceResponses(
com.google.cloud.aiplatform.v1.MigrateResourceResponse.Builder builderForValue) {
if (migrateResourceResponsesBuilder_ == null) {
ensureMigrateResourceResponsesIsMutable();
migrateResourceResponses_.add(builderForValue.build());
onChanged();
} else {
migrateResourceResponsesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Successfully migrated resources.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.MigrateResourceResponse migrate_resource_responses = 1;
* </code>
*/
public Builder addMigrateResourceResponses(
int index, com.google.cloud.aiplatform.v1.MigrateResourceResponse.Builder builderForValue) {
if (migrateResourceResponsesBuilder_ == null) {
ensureMigrateResourceResponsesIsMutable();
migrateResourceResponses_.add(index, builderForValue.build());
onChanged();
} else {
migrateResourceResponsesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Successfully migrated resources.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.MigrateResourceResponse migrate_resource_responses = 1;
* </code>
*/
public Builder addAllMigrateResourceResponses(
java.lang.Iterable<? extends com.google.cloud.aiplatform.v1.MigrateResourceResponse>
values) {
if (migrateResourceResponsesBuilder_ == null) {
ensureMigrateResourceResponsesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, migrateResourceResponses_);
onChanged();
} else {
migrateResourceResponsesBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* Successfully migrated resources.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.MigrateResourceResponse migrate_resource_responses = 1;
* </code>
*/
public Builder clearMigrateResourceResponses() {
if (migrateResourceResponsesBuilder_ == null) {
migrateResourceResponses_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
migrateResourceResponsesBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* Successfully migrated resources.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.MigrateResourceResponse migrate_resource_responses = 1;
* </code>
*/
public Builder removeMigrateResourceResponses(int index) {
if (migrateResourceResponsesBuilder_ == null) {
ensureMigrateResourceResponsesIsMutable();
migrateResourceResponses_.remove(index);
onChanged();
} else {
migrateResourceResponsesBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* Successfully migrated resources.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.MigrateResourceResponse migrate_resource_responses = 1;
* </code>
*/
public com.google.cloud.aiplatform.v1.MigrateResourceResponse.Builder
getMigrateResourceResponsesBuilder(int index) {
return getMigrateResourceResponsesFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* Successfully migrated resources.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.MigrateResourceResponse migrate_resource_responses = 1;
* </code>
*/
public com.google.cloud.aiplatform.v1.MigrateResourceResponseOrBuilder
getMigrateResourceResponsesOrBuilder(int index) {
if (migrateResourceResponsesBuilder_ == null) {
return migrateResourceResponses_.get(index);
} else {
return migrateResourceResponsesBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* Successfully migrated resources.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.MigrateResourceResponse migrate_resource_responses = 1;
* </code>
*/
public java.util.List<? extends com.google.cloud.aiplatform.v1.MigrateResourceResponseOrBuilder>
getMigrateResourceResponsesOrBuilderList() {
if (migrateResourceResponsesBuilder_ != null) {
return migrateResourceResponsesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(migrateResourceResponses_);
}
}
/**
*
*
* <pre>
* Successfully migrated resources.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.MigrateResourceResponse migrate_resource_responses = 1;
* </code>
*/
public com.google.cloud.aiplatform.v1.MigrateResourceResponse.Builder
addMigrateResourceResponsesBuilder() {
return getMigrateResourceResponsesFieldBuilder()
.addBuilder(com.google.cloud.aiplatform.v1.MigrateResourceResponse.getDefaultInstance());
}
/**
*
*
* <pre>
* Successfully migrated resources.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.MigrateResourceResponse migrate_resource_responses = 1;
* </code>
*/
public com.google.cloud.aiplatform.v1.MigrateResourceResponse.Builder
addMigrateResourceResponsesBuilder(int index) {
return getMigrateResourceResponsesFieldBuilder()
.addBuilder(
index, com.google.cloud.aiplatform.v1.MigrateResourceResponse.getDefaultInstance());
}
/**
*
*
* <pre>
* Successfully migrated resources.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.MigrateResourceResponse migrate_resource_responses = 1;
* </code>
*/
public java.util.List<com.google.cloud.aiplatform.v1.MigrateResourceResponse.Builder>
getMigrateResourceResponsesBuilderList() {
return getMigrateResourceResponsesFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilderV3; once created it owns the list,
// so the raw field reference is nulled out.
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.aiplatform.v1.MigrateResourceResponse,
com.google.cloud.aiplatform.v1.MigrateResourceResponse.Builder,
com.google.cloud.aiplatform.v1.MigrateResourceResponseOrBuilder>
getMigrateResourceResponsesFieldBuilder() {
if (migrateResourceResponsesBuilder_ == null) {
migrateResourceResponsesBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.aiplatform.v1.MigrateResourceResponse,
com.google.cloud.aiplatform.v1.MigrateResourceResponse.Builder,
com.google.cloud.aiplatform.v1.MigrateResourceResponseOrBuilder>(
migrateResourceResponses_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
migrateResourceResponses_ = null;
}
return migrateResourceResponsesBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.BatchMigrateResourcesResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.BatchMigrateResourcesResponse)
// Singleton default instance shared by getDefaultInstance()/getDefaultInstanceForType().
private static final com.google.cloud.aiplatform.v1.BatchMigrateResourcesResponse
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.BatchMigrateResourcesResponse();
}
public static com.google.cloud.aiplatform.v1.BatchMigrateResourcesResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Wire-format parser; failures carry the partially-built message via setUnfinishedMessage().
private static final com.google.protobuf.Parser<BatchMigrateResourcesResponse> PARSER =
new com.google.protobuf.AbstractParser<BatchMigrateResourcesResponse>() {
@java.lang.Override
public BatchMigrateResourcesResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<BatchMigrateResourcesResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<BatchMigrateResourcesResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.BatchMigrateResourcesResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// ==== googleapis/google-api-java-client-services : clients/google-api-services-bigquery/v2/1.30.1/com/google/api/services/bigquery/model/JobConfigurationQuery.java ====
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.bigquery.model;
/**
* Model definition for JobConfigurationQuery.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the BigQuery API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class JobConfigurationQuery extends com.google.api.client.json.GenericJson {
/**
* [Optional] If true and query uses legacy SQL dialect, allows the query to produce arbitrarily
* large result tables at a slight cost in performance. Requires destinationTable to be set. For
* standard SQL queries, this flag is ignored and large results are always allowed. However, you
* must still set destinationTable when result size exceeds the allowed maximum response size.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean allowLargeResults;
/**
* [Beta] Clustering specification for the destination table. Must be specified with time-based
* partitioning, data in the table will be first partitioned and subsequently clustered.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private Clustering clustering;
/**
* Connection properties.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<ConnectionProperty> connectionProperties;
static {
// hack to force ProGuard to consider ConnectionProperty used, since otherwise it would be stripped out
// see https://github.com/google/google-api-java-client/issues/543
com.google.api.client.util.Data.nullOf(ConnectionProperty.class);
}
/**
* [Optional] Specifies whether the job is allowed to create new tables. The following values are
* supported: CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the table.
* CREATE_NEVER: The table must already exist. If it does not, a 'notFound' error is returned in
* the job result. The default value is CREATE_IF_NEEDED. Creation, truncation and append actions
* occur as one atomic update upon job completion.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String createDisposition;
/**
* [Optional] Specifies the default dataset to use for unqualified table names in the query. Note
* that this does not alter behavior of unqualified dataset names.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DatasetReference defaultDataset;
/**
* Custom encryption configuration (e.g., Cloud KMS keys).
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private EncryptionConfiguration destinationEncryptionConfiguration;
/**
* [Optional] Describes the table where the query results should be stored. If not present, a new
* table will be created to store the results. This property must be set for large results that
* exceed the maximum response size.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private TableReference destinationTable;
/**
* [Optional] If true and query uses legacy SQL dialect, flattens all nested and repeated fields
* in the query results. allowLargeResults must be true if this is set to false. For standard SQL
* queries, this flag is ignored and results are never flattened.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean flattenResults;
/**
* [Optional] Limits the billing tier for this job. Queries that have resource usage beyond this
* tier will fail (without incurring a charge). If unspecified, this will be set to your project
* default.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Integer maximumBillingTier;
/**
* [Optional] Limits the bytes billed for this job. Queries that will have bytes billed beyond
* this limit will fail (without incurring a charge). If unspecified, this will be set to your
* project default.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long maximumBytesBilled;
/**
* Standard SQL only. Set to POSITIONAL to use positional (?) query parameters or to NAMED to use
* named (@myparam) query parameters in this query.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String parameterMode;
/**
* [Deprecated] This property is deprecated.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean preserveNulls;
/**
* [Optional] Specifies a priority for the query. Possible values include INTERACTIVE and BATCH.
* The default value is INTERACTIVE.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String priority;
/**
* [Required] SQL query text to execute. The useLegacySql field can be used to indicate whether
* the query uses legacy SQL or standard SQL.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String query;
/**
* Query parameters for standard SQL queries.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<QueryParameter> queryParameters;
/**
* [TrustedTester] Range partitioning specification for this table. Only one of timePartitioning
* and rangePartitioning should be specified.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private RangePartitioning rangePartitioning;
/**
* Allows the schema of the destination table to be updated as a side effect of the query job.
* Schema update options are supported in two cases: when writeDisposition is WRITE_APPEND; when
* writeDisposition is WRITE_TRUNCATE and the destination table is a partition of a table,
* specified by partition decorators. For normal tables, WRITE_TRUNCATE will always overwrite the
* schema. One or more of the following values are specified: ALLOW_FIELD_ADDITION: allow adding a
* nullable field to the schema. ALLOW_FIELD_RELAXATION: allow relaxing a required field in the
* original schema to nullable.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> schemaUpdateOptions;
/**
* [Optional] If querying an external data source outside of BigQuery, describes the data format,
* location and other properties of the data source. By defining these properties, the data source
* can then be queried as if it were a standard BigQuery table.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.Map<String, ExternalDataConfiguration> tableDefinitions;
static {
// hack to force ProGuard to consider ExternalDataConfiguration used, since otherwise it would be stripped out
// see https://github.com/google/google-api-java-client/issues/543
com.google.api.client.util.Data.nullOf(ExternalDataConfiguration.class);
}
/**
* Time-based partitioning specification for the destination table. Only one of timePartitioning
* and rangePartitioning should be specified.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private TimePartitioning timePartitioning;
/**
* Specifies whether to use BigQuery's legacy SQL dialect for this query. The default value is
* true. If set to false, the query will use BigQuery's standard SQL:
* https://cloud.google.com/bigquery/sql-reference/ When useLegacySql is set to false, the value
* of flattenResults is ignored; query will be run as if flattenResults is false.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean useLegacySql;
/**
* [Optional] Whether to look for the result in the query cache. The query cache is a best-effort
* cache that will be flushed whenever tables in the query are modified. Moreover, the query cache
* is only available when a query does not have a destination table specified. The default value
* is true.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean useQueryCache;
/**
* Describes user-defined function resources used in the query.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<UserDefinedFunctionResource> userDefinedFunctionResources;
/**
* [Optional] Specifies the action that occurs if the destination table already exists. The
* following values are supported: WRITE_TRUNCATE: If the table already exists, BigQuery
* overwrites the table data and uses the schema from the query result. WRITE_APPEND: If the table
* already exists, BigQuery appends the data to the table. WRITE_EMPTY: If the table already
* exists and contains data, a 'duplicate' error is returned in the job result. The default value
* is WRITE_EMPTY. Each action is atomic and only occurs if BigQuery is able to complete the job
* successfully. Creation, truncation and append actions occur as one atomic update upon job
* completion.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String writeDisposition;
/**
* [Optional] If true and query uses legacy SQL dialect, allows the query to produce arbitrarily
* large result tables at a slight cost in performance. Requires destinationTable to be set. For
* standard SQL queries, this flag is ignored and large results are always allowed. However, you
* must still set destinationTable when result size exceeds the allowed maximum response size.
* @return value or {@code null} for none
*/
public java.lang.Boolean getAllowLargeResults() {
return allowLargeResults;
}
/**
* [Optional] If true and query uses legacy SQL dialect, allows the query to produce arbitrarily
* large result tables at a slight cost in performance. Requires destinationTable to be set. For
* standard SQL queries, this flag is ignored and large results are always allowed. However, you
* must still set destinationTable when result size exceeds the allowed maximum response size.
* @param allowLargeResults allowLargeResults or {@code null} for none
*/
public JobConfigurationQuery setAllowLargeResults(java.lang.Boolean allowLargeResults) {
this.allowLargeResults = allowLargeResults;
return this;
}
/**
* Convenience method that returns only {@link Boolean#TRUE} or {@link Boolean#FALSE}.
*
* <p>
* Boolean properties can have four possible values:
* {@code null}, {@link com.google.api.client.util.Data#NULL_BOOLEAN}, {@link Boolean#TRUE}
* or {@link Boolean#FALSE}.
* </p>
*
* <p>
* This method returns {@link Boolean#TRUE} if the default of the property is {@link Boolean#TRUE}
* and it is {@code null} or {@link com.google.api.client.util.Data#NULL_BOOLEAN}.
* {@link Boolean#FALSE} is returned if the default of the property is {@link Boolean#FALSE} and
* it is {@code null} or {@link com.google.api.client.util.Data#NULL_BOOLEAN}.
* </p>
*
* <p>
*[ Optional] If true and query uses legacy SQL dialect, allows the query to produce arbitrarily large
[ result tables at a slight cost in performance. Requires destinationTable to be set. For standard
[ SQL queries, this flag is ignored and large results are always allowed. However, you must still
[ set destinationTable when result size exceeds the allowed maximum response size.
* </p>
*/
public boolean isAllowLargeResults() {
if (allowLargeResults == null || allowLargeResults == com.google.api.client.util.Data.NULL_BOOLEAN) {
return false;
}
return allowLargeResults;
}
/**
* [Beta] Clustering specification for the destination table. Must be specified with time-based
* partitioning, data in the table will be first partitioned and subsequently clustered.
* @return value or {@code null} for none
*/
public Clustering getClustering() {
return clustering;
}
/**
* [Beta] Clustering specification for the destination table. Must be specified with time-based
* partitioning, data in the table will be first partitioned and subsequently clustered.
* @param clustering clustering or {@code null} for none
*/
public JobConfigurationQuery setClustering(Clustering clustering) {
this.clustering = clustering;
return this;
}
/**
* Connection properties.
* @return value or {@code null} for none
*/
public java.util.List<ConnectionProperty> getConnectionProperties() {
return connectionProperties;
}
/**
* Connection properties.
* @param connectionProperties connectionProperties or {@code null} for none
*/
public JobConfigurationQuery setConnectionProperties(java.util.List<ConnectionProperty> connectionProperties) {
this.connectionProperties = connectionProperties;
return this;
}
/**
* [Optional] Specifies whether the job is allowed to create new tables. The following values are
* supported: CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the table.
* CREATE_NEVER: The table must already exist. If it does not, a 'notFound' error is returned in
* the job result. The default value is CREATE_IF_NEEDED. Creation, truncation and append actions
* occur as one atomic update upon job completion.
* @return value or {@code null} for none
*/
public java.lang.String getCreateDisposition() {
return createDisposition;
}
/**
* [Optional] Specifies whether the job is allowed to create new tables. The following values are
* supported: CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the table.
* CREATE_NEVER: The table must already exist. If it does not, a 'notFound' error is returned in
* the job result. The default value is CREATE_IF_NEEDED. Creation, truncation and append actions
* occur as one atomic update upon job completion.
* @param createDisposition createDisposition or {@code null} for none
*/
public JobConfigurationQuery setCreateDisposition(java.lang.String createDisposition) {
this.createDisposition = createDisposition;
return this;
}
/**
* [Optional] Specifies the default dataset to use for unqualified table names in the query. Note
* that this does not alter behavior of unqualified dataset names.
* @return value or {@code null} for none
*/
public DatasetReference getDefaultDataset() {
return defaultDataset;
}
/**
* [Optional] Specifies the default dataset to use for unqualified table names in the query. Note
* that this does not alter behavior of unqualified dataset names.
* @param defaultDataset defaultDataset or {@code null} for none
*/
public JobConfigurationQuery setDefaultDataset(DatasetReference defaultDataset) {
this.defaultDataset = defaultDataset;
return this;
}
/**
* Custom encryption configuration (e.g., Cloud KMS keys).
* @return value or {@code null} for none
*/
public EncryptionConfiguration getDestinationEncryptionConfiguration() {
return destinationEncryptionConfiguration;
}
/**
* Custom encryption configuration (e.g., Cloud KMS keys).
* @param destinationEncryptionConfiguration destinationEncryptionConfiguration or {@code null} for none
*/
public JobConfigurationQuery setDestinationEncryptionConfiguration(EncryptionConfiguration destinationEncryptionConfiguration) {
this.destinationEncryptionConfiguration = destinationEncryptionConfiguration;
return this;
}
/**
* [Optional] Describes the table where the query results should be stored. If not present, a new
* table will be created to store the results. This property must be set for large results that
* exceed the maximum response size.
* @return value or {@code null} for none
*/
public TableReference getDestinationTable() {
return destinationTable;
}
/**
* [Optional] Describes the table where the query results should be stored. If not present, a new
* table will be created to store the results. This property must be set for large results that
* exceed the maximum response size.
* @param destinationTable destinationTable or {@code null} for none
*/
public JobConfigurationQuery setDestinationTable(TableReference destinationTable) {
this.destinationTable = destinationTable;
return this;
}
/**
* [Optional] If true and query uses legacy SQL dialect, flattens all nested and repeated fields
* in the query results. allowLargeResults must be true if this is set to false. For standard SQL
* queries, this flag is ignored and results are never flattened.
* @return value or {@code null} for none
*/
public java.lang.Boolean getFlattenResults() {
return flattenResults;
}
/**
* [Optional] If true and query uses legacy SQL dialect, flattens all nested and repeated fields
* in the query results. allowLargeResults must be true if this is set to false. For standard SQL
* queries, this flag is ignored and results are never flattened.
* @param flattenResults flattenResults or {@code null} for none
*/
public JobConfigurationQuery setFlattenResults(java.lang.Boolean flattenResults) {
this.flattenResults = flattenResults;
return this;
}
/**
* Convenience method that returns only {@link Boolean#TRUE} or {@link Boolean#FALSE}.
*
* <p>
* Boolean properties can have four possible values:
* {@code null}, {@link com.google.api.client.util.Data#NULL_BOOLEAN}, {@link Boolean#TRUE}
* or {@link Boolean#FALSE}.
* </p>
*
* <p>
* This method returns {@link Boolean#TRUE} if the default of the property is {@link Boolean#TRUE}
* and it is {@code null} or {@link com.google.api.client.util.Data#NULL_BOOLEAN}.
* {@link Boolean#FALSE} is returned if the default of the property is {@link Boolean#FALSE} and
* it is {@code null} or {@link com.google.api.client.util.Data#NULL_BOOLEAN}.
* </p>
*
* <p>
*[ Optional] If true and query uses legacy SQL dialect, flattens all nested and repeated fields in
[ the query results. allowLargeResults must be true if this is set to false. For standard SQL
[ queries, this flag is ignored and results are never flattened.
* </p>
*/
public boolean isFlattenResults() {
if (flattenResults == null || flattenResults == com.google.api.client.util.Data.NULL_BOOLEAN) {
return true;
}
return flattenResults;
}
/**
* [Optional] Limits the billing tier for this job. Queries that have resource usage beyond this
* tier will fail (without incurring a charge). If unspecified, this will be set to your project
* default.
* @return value or {@code null} for none
*/
public java.lang.Integer getMaximumBillingTier() {
return maximumBillingTier;
}
/**
* [Optional] Limits the billing tier for this job. Queries that have resource usage beyond this
* tier will fail (without incurring a charge). If unspecified, this will be set to your project
* default.
* @param maximumBillingTier maximumBillingTier or {@code null} for none
*/
public JobConfigurationQuery setMaximumBillingTier(java.lang.Integer maximumBillingTier) {
this.maximumBillingTier = maximumBillingTier;
return this;
}
/**
* [Optional] Limits the bytes billed for this job. Queries that will have bytes billed beyond
* this limit will fail (without incurring a charge). If unspecified, this will be set to your
* project default.
* @return value or {@code null} for none
*/
public java.lang.Long getMaximumBytesBilled() {
return maximumBytesBilled;
}
/**
* [Optional] Limits the bytes billed for this job. Queries that will have bytes billed beyond
* this limit will fail (without incurring a charge). If unspecified, this will be set to your
* project default.
* @param maximumBytesBilled maximumBytesBilled or {@code null} for none
*/
public JobConfigurationQuery setMaximumBytesBilled(java.lang.Long maximumBytesBilled) {
this.maximumBytesBilled = maximumBytesBilled;
return this;
}
/**
* Standard SQL only. Set to POSITIONAL to use positional (?) query parameters or to NAMED to use
* named (@myparam) query parameters in this query.
* @return value or {@code null} for none
*/
public java.lang.String getParameterMode() {
return parameterMode;
}
/**
* Standard SQL only. Set to POSITIONAL to use positional (?) query parameters or to NAMED to use
* named (@myparam) query parameters in this query.
* @param parameterMode parameterMode or {@code null} for none
*/
public JobConfigurationQuery setParameterMode(java.lang.String parameterMode) {
this.parameterMode = parameterMode;
return this;
}
/**
* [Deprecated] This property is deprecated.
* @return value or {@code null} for none
*/
public java.lang.Boolean getPreserveNulls() {
return preserveNulls;
}
/**
* [Deprecated] This property is deprecated.
* @param preserveNulls preserveNulls or {@code null} for none
*/
public JobConfigurationQuery setPreserveNulls(java.lang.Boolean preserveNulls) {
this.preserveNulls = preserveNulls;
return this;
}
/**
* [Optional] Specifies a priority for the query. Possible values include INTERACTIVE and BATCH.
* The default value is INTERACTIVE.
* @return value or {@code null} for none
*/
public java.lang.String getPriority() {
return priority;
}
/**
* [Optional] Specifies a priority for the query. Possible values include INTERACTIVE and BATCH.
* The default value is INTERACTIVE.
* @param priority priority or {@code null} for none
*/
public JobConfigurationQuery setPriority(java.lang.String priority) {
this.priority = priority;
return this;
}
/**
* [Required] SQL query text to execute. The useLegacySql field can be used to indicate whether
* the query uses legacy SQL or standard SQL.
* @return value or {@code null} for none
*/
public java.lang.String getQuery() {
return query;
}
/**
* [Required] SQL query text to execute. The useLegacySql field can be used to indicate whether
* the query uses legacy SQL or standard SQL.
* @param query query or {@code null} for none
*/
public JobConfigurationQuery setQuery(java.lang.String query) {
this.query = query;
return this;
}
/**
* Query parameters for standard SQL queries.
* @return value or {@code null} for none
*/
public java.util.List<QueryParameter> getQueryParameters() {
return queryParameters;
}
/**
* Query parameters for standard SQL queries.
* @param queryParameters queryParameters or {@code null} for none
*/
public JobConfigurationQuery setQueryParameters(java.util.List<QueryParameter> queryParameters) {
this.queryParameters = queryParameters;
return this;
}
/**
* [TrustedTester] Range partitioning specification for this table. Only one of timePartitioning
* and rangePartitioning should be specified.
* @return value or {@code null} for none
*/
public RangePartitioning getRangePartitioning() {
return rangePartitioning;
}
/**
* [TrustedTester] Range partitioning specification for this table. Only one of timePartitioning
* and rangePartitioning should be specified.
* @param rangePartitioning rangePartitioning or {@code null} for none
*/
public JobConfigurationQuery setRangePartitioning(RangePartitioning rangePartitioning) {
this.rangePartitioning = rangePartitioning;
return this;
}
/**
* Allows the schema of the destination table to be updated as a side effect of the query job.
* Schema update options are supported in two cases: when writeDisposition is WRITE_APPEND; when
* writeDisposition is WRITE_TRUNCATE and the destination table is a partition of a table,
* specified by partition decorators. For normal tables, WRITE_TRUNCATE will always overwrite the
* schema. One or more of the following values are specified: ALLOW_FIELD_ADDITION: allow adding a
* nullable field to the schema. ALLOW_FIELD_RELAXATION: allow relaxing a required field in the
* original schema to nullable.
* @return value or {@code null} for none
*/
public java.util.List<java.lang.String> getSchemaUpdateOptions() {
return schemaUpdateOptions;
}
/**
* Allows the schema of the destination table to be updated as a side effect of the query job.
* Schema update options are supported in two cases: when writeDisposition is WRITE_APPEND; when
* writeDisposition is WRITE_TRUNCATE and the destination table is a partition of a table,
* specified by partition decorators. For normal tables, WRITE_TRUNCATE will always overwrite the
* schema. One or more of the following values are specified: ALLOW_FIELD_ADDITION: allow adding a
* nullable field to the schema. ALLOW_FIELD_RELAXATION: allow relaxing a required field in the
* original schema to nullable.
* @param schemaUpdateOptions schemaUpdateOptions or {@code null} for none
*/
public JobConfigurationQuery setSchemaUpdateOptions(java.util.List<java.lang.String> schemaUpdateOptions) {
this.schemaUpdateOptions = schemaUpdateOptions;
return this;
}
/**
* [Optional] If querying an external data source outside of BigQuery, describes the data format,
* location and other properties of the data source. By defining these properties, the data source
* can then be queried as if it were a standard BigQuery table.
* @return value or {@code null} for none
*/
public java.util.Map<String, ExternalDataConfiguration> getTableDefinitions() {
return tableDefinitions;
}
/**
* [Optional] If querying an external data source outside of BigQuery, describes the data format,
* location and other properties of the data source. By defining these properties, the data source
* can then be queried as if it were a standard BigQuery table.
* @param tableDefinitions tableDefinitions or {@code null} for none
*/
public JobConfigurationQuery setTableDefinitions(java.util.Map<String, ExternalDataConfiguration> tableDefinitions) {
this.tableDefinitions = tableDefinitions;
return this;
}
/**
* Time-based partitioning specification for the destination table. Only one of timePartitioning
* and rangePartitioning should be specified.
* @return value or {@code null} for none
*/
public TimePartitioning getTimePartitioning() {
return timePartitioning;
}
/**
* Time-based partitioning specification for the destination table. Only one of timePartitioning
* and rangePartitioning should be specified.
* @param timePartitioning timePartitioning or {@code null} for none
*/
public JobConfigurationQuery setTimePartitioning(TimePartitioning timePartitioning) {
this.timePartitioning = timePartitioning;
return this;
}
/**
* Specifies whether to use BigQuery's legacy SQL dialect for this query. The default value is
* true. If set to false, the query will use BigQuery's standard SQL:
* https://cloud.google.com/bigquery/sql-reference/ When useLegacySql is set to false, the value
* of flattenResults is ignored; query will be run as if flattenResults is false.
* @return value or {@code null} for none
*/
public java.lang.Boolean getUseLegacySql() {
return useLegacySql;
}
/**
* Specifies whether to use BigQuery's legacy SQL dialect for this query. The default value is
* true. If set to false, the query will use BigQuery's standard SQL:
* https://cloud.google.com/bigquery/sql-reference/ When useLegacySql is set to false, the value
* of flattenResults is ignored; query will be run as if flattenResults is false.
* @param useLegacySql useLegacySql or {@code null} for none
*/
public JobConfigurationQuery setUseLegacySql(java.lang.Boolean useLegacySql) {
this.useLegacySql = useLegacySql;
return this;
}
/**
* Convenience method that returns only {@link Boolean#TRUE} or {@link Boolean#FALSE}.
*
* <p>
* Boolean properties can have four possible values:
* {@code null}, {@link com.google.api.client.util.Data#NULL_BOOLEAN}, {@link Boolean#TRUE}
* or {@link Boolean#FALSE}.
* </p>
*
* <p>
* This method returns {@link Boolean#TRUE} if the default of the property is {@link Boolean#TRUE}
* and it is {@code null} or {@link com.google.api.client.util.Data#NULL_BOOLEAN}.
* {@link Boolean#FALSE} is returned if the default of the property is {@link Boolean#FALSE} and
* it is {@code null} or {@link com.google.api.client.util.Data#NULL_BOOLEAN}.
* </p>
*
* <p>
* Specifies whether to use BigQuery's legacy SQL dialect for this query. The default value is true.
If set to false, the query will use BigQuery's standard SQL: https://cloud.google.com/bigquery/sql-
reference/ When useLegacySql is set to false, the value of flattenResults is ignored; query will be
run as if flattenResults is false.
* </p>
*/
public boolean isUseLegacySql() {
if (useLegacySql == null || useLegacySql == com.google.api.client.util.Data.NULL_BOOLEAN) {
return true;
}
return useLegacySql;
}
/**
* [Optional] Whether to look for the result in the query cache. The query cache is a best-effort
* cache that will be flushed whenever tables in the query are modified. Moreover, the query cache
* is only available when a query does not have a destination table specified. The default value
* is true.
* @return value or {@code null} for none
*/
public java.lang.Boolean getUseQueryCache() {
return useQueryCache;
}
/**
* [Optional] Whether to look for the result in the query cache. The query cache is a best-effort
* cache that will be flushed whenever tables in the query are modified. Moreover, the query cache
* is only available when a query does not have a destination table specified. The default value
* is true.
* @param useQueryCache useQueryCache or {@code null} for none
*/
public JobConfigurationQuery setUseQueryCache(java.lang.Boolean useQueryCache) {
this.useQueryCache = useQueryCache;
return this;
}
/**
* Convenience method that returns only {@link Boolean#TRUE} or {@link Boolean#FALSE}.
*
* <p>
* Boolean properties can have four possible values:
* {@code null}, {@link com.google.api.client.util.Data#NULL_BOOLEAN}, {@link Boolean#TRUE}
* or {@link Boolean#FALSE}.
* </p>
*
* <p>
* This method returns {@link Boolean#TRUE} if the default of the property is {@link Boolean#TRUE}
* and it is {@code null} or {@link com.google.api.client.util.Data#NULL_BOOLEAN}.
* {@link Boolean#FALSE} is returned if the default of the property is {@link Boolean#FALSE} and
* it is {@code null} or {@link com.google.api.client.util.Data#NULL_BOOLEAN}.
* </p>
*
* <p>
*[ Optional] Whether to look for the result in the query cache. The query cache is a best-effort
[ cache that will be flushed whenever tables in the query are modified. Moreover, the query cache is
[ only available when a query does not have a destination table specified. The default value is
[ true.
* </p>
*/
public boolean isUseQueryCache() {
if (useQueryCache == null || useQueryCache == com.google.api.client.util.Data.NULL_BOOLEAN) {
return true;
}
return useQueryCache;
}
/**
* Describes user-defined function resources used in the query.
* @return value or {@code null} for none
*/
public java.util.List<UserDefinedFunctionResource> getUserDefinedFunctionResources() {
return userDefinedFunctionResources;
}
/**
* Describes user-defined function resources used in the query.
* @param userDefinedFunctionResources userDefinedFunctionResources or {@code null} for none
*/
public JobConfigurationQuery setUserDefinedFunctionResources(java.util.List<UserDefinedFunctionResource> userDefinedFunctionResources) {
this.userDefinedFunctionResources = userDefinedFunctionResources;
return this;
}
/**
* [Optional] Specifies the action that occurs if the destination table already exists. The
* following values are supported: WRITE_TRUNCATE: If the table already exists, BigQuery
* overwrites the table data and uses the schema from the query result. WRITE_APPEND: If the table
* already exists, BigQuery appends the data to the table. WRITE_EMPTY: If the table already
* exists and contains data, a 'duplicate' error is returned in the job result. The default value
* is WRITE_EMPTY. Each action is atomic and only occurs if BigQuery is able to complete the job
* successfully. Creation, truncation and append actions occur as one atomic update upon job
* completion.
* @return value or {@code null} for none
*/
public java.lang.String getWriteDisposition() {
return writeDisposition;
}
  /**
   * [Optional] Specifies the action that occurs if the destination table already exists. The
   * following values are supported: WRITE_TRUNCATE: If the table already exists, BigQuery
   * overwrites the table data and uses the schema from the query result. WRITE_APPEND: If the table
   * already exists, BigQuery appends the data to the table. WRITE_EMPTY: If the table already
   * exists and contains data, a 'duplicate' error is returned in the job result. The default value
   * is WRITE_EMPTY. Each action is atomic and only occurs if BigQuery is able to complete the job
   * successfully. Creation, truncation and append actions occur as one atomic update upon job
   * completion.
   *
   * @param writeDisposition writeDisposition or {@code null} for none
   * @return this object, so setter calls can be chained (fluent builder style)
   */
  public JobConfigurationQuery setWriteDisposition(java.lang.String writeDisposition) {
    this.writeDisposition = writeDisposition;
    return this;
  }
  @Override
  public JobConfigurationQuery set(String fieldName, Object value) {
    // Covariant override: delegate to the superclass and narrow the return type
    // so chained calls keep the concrete JobConfigurationQuery type.
    return (JobConfigurationQuery) super.set(fieldName, value);
  }
  @Override
  public JobConfigurationQuery clone() {
    // Covariant override: the superclass performs the copy; we only narrow the type.
    return (JobConfigurationQuery) super.clone();
  }
}
|
apache/felix-dev | 33,338 | framework/src/main/java/org/osgi/util/tracker/ServiceTracker.java | /*
* Copyright (c) OSGi Alliance (2000, 2017). All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.osgi.util.tracker;
import java.lang.reflect.Array;
import java.util.Collections;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.concurrent.TimeUnit;
import org.osgi.annotation.versioning.ConsumerType;
import org.osgi.framework.AllServiceListener;
import org.osgi.framework.BundleContext;
import org.osgi.framework.Constants;
import org.osgi.framework.Filter;
import org.osgi.framework.InvalidSyntaxException;
import org.osgi.framework.ServiceEvent;
import org.osgi.framework.ServiceListener;
import org.osgi.framework.ServiceReference;
/**
* The {@code ServiceTracker} class simplifies using services from the
* Framework's service registry.
* <p>
* A {@code ServiceTracker} object is constructed with search criteria and a
* {@code ServiceTrackerCustomizer} object. A {@code ServiceTracker} can use a
* {@code ServiceTrackerCustomizer} to customize the service objects to be
* tracked. The {@code ServiceTracker} can then be opened to begin tracking all
* services in the Framework's service registry that match the specified search
* criteria. The {@code ServiceTracker} correctly handles all of the details of
* listening to {@code ServiceEvent}s and getting and ungetting services.
* <p>
* The {@code getServiceReferences} method can be called to get references to
* the services being tracked. The {@code getService} and {@code getServices}
* methods can be called to get the service objects for the tracked service.
* <p>
* The {@code ServiceTracker} class is thread-safe. It does not call a
* {@code ServiceTrackerCustomizer} while holding any locks.
* {@code ServiceTrackerCustomizer} implementations must also be thread-safe.
*
* @param <S> The type of the service being tracked.
* @param <T> The type of the tracked object.
* @ThreadSafe
* @author $Id: 3c9016c43c6289259f97470eff4c9986b6fb887a $
*/
@ConsumerType
public class ServiceTracker<S, T> implements ServiceTrackerCustomizer<S, T> {
/* set this to true to compile in debug messages */
static final boolean DEBUG = false;
/**
* The Bundle Context used by this {@code ServiceTracker}.
*/
protected final BundleContext context;
/**
* The Filter used by this {@code ServiceTracker} which specifies the search
* criteria for the services to track.
*
* @since 1.1
*/
protected final Filter filter;
/**
* The {@code ServiceTrackerCustomizer} for this tracker.
*/
final ServiceTrackerCustomizer<S, T> customizer;
/**
* Filter string for use when adding the ServiceListener. If this field is
* set, then certain optimizations can be taken since we don't have a user
* supplied filter.
*/
final String listenerFilter;
/**
* Class name to be tracked. If this field is set, then we are tracking by
* class name.
*/
private final String trackClass;
/**
* Reference to be tracked. If this field is set, then we are tracking a
* single ServiceReference.
*/
private final ServiceReference<S> trackReference;
/**
* Tracked services: {@code ServiceReference} -> customized Object and
* {@code ServiceListener} object
*/
private volatile Tracked tracked;
	/**
	 * Accessor method for the current Tracked object. This method is only
	 * intended to be used by the unsynchronized methods which do not modify the
	 * tracked field.
	 *
	 * @return The current Tracked object, or {@code null} if this tracker is
	 *         not open.
	 */
	private Tracked tracked() {
		// Single volatile read; callers must null-check the result.
		return tracked;
	}
/**
* Cached ServiceReference for getServiceReference.
*
* This field is volatile since it is accessed by multiple threads.
*/
private volatile ServiceReference<S> cachedReference;
/**
* Cached service object for getService.
*
* This field is volatile since it is accessed by multiple threads.
*/
private volatile T cachedService;
/**
* Create a {@code ServiceTracker} on the specified {@code ServiceReference}
* .
*
* <p>
* The service referenced by the specified {@code ServiceReference} will be
* tracked by this {@code ServiceTracker}.
*
* @param context The {@code BundleContext} against which the tracking is
* done.
* @param reference The {@code ServiceReference} for the service to be
* tracked.
* @param customizer The customizer object to call when services are added,
* modified, or removed in this {@code ServiceTracker}. If customizer
* is {@code null}, then this {@code ServiceTracker} will be used as
* the {@code ServiceTrackerCustomizer} and this
* {@code ServiceTracker} will call the
* {@code ServiceTrackerCustomizer} methods on itself.
*/
public ServiceTracker(final BundleContext context, final ServiceReference<S> reference, final ServiceTrackerCustomizer<S, T> customizer) {
this.context = context;
this.trackReference = reference;
this.trackClass = null;
this.customizer = (customizer == null) ? this : customizer;
this.listenerFilter = "(" + Constants.SERVICE_ID + "=" + reference.getProperty(Constants.SERVICE_ID).toString() + ")";
try {
this.filter = context.createFilter(listenerFilter);
} catch (InvalidSyntaxException e) {
/*
* we could only get this exception if the ServiceReference was
* invalid
*/
IllegalArgumentException iae = new IllegalArgumentException("unexpected InvalidSyntaxException: " + e.getMessage());
iae.initCause(e);
throw iae;
}
}
/**
* Create a {@code ServiceTracker} on the specified class name.
*
* <p>
* Services registered under the specified class name will be tracked by
* this {@code ServiceTracker}.
*
* @param context The {@code BundleContext} against which the tracking is
* done.
* @param clazz The class name of the services to be tracked.
* @param customizer The customizer object to call when services are added,
* modified, or removed in this {@code ServiceTracker}. If customizer
* is {@code null}, then this {@code ServiceTracker} will be used as
* the {@code ServiceTrackerCustomizer} and this
* {@code ServiceTracker} will call the
* {@code ServiceTrackerCustomizer} methods on itself.
*/
public ServiceTracker(final BundleContext context, final String clazz, final ServiceTrackerCustomizer<S, T> customizer) {
this.context = context;
this.trackReference = null;
this.trackClass = clazz;
this.customizer = (customizer == null) ? this : customizer;
// we call clazz.toString to verify clazz is non-null!
this.listenerFilter = "(" + Constants.OBJECTCLASS + "=" + clazz.toString() + ")";
try {
this.filter = context.createFilter(listenerFilter);
} catch (InvalidSyntaxException e) {
/*
* we could only get this exception if the clazz argument was
* malformed
*/
IllegalArgumentException iae = new IllegalArgumentException("unexpected InvalidSyntaxException: " + e.getMessage());
iae.initCause(e);
throw iae;
}
}
/**
* Create a {@code ServiceTracker} on the specified {@code Filter} object.
*
* <p>
* Services which match the specified {@code Filter} object will be tracked
* by this {@code ServiceTracker}.
*
* @param context The {@code BundleContext} against which the tracking is
* done.
* @param filter The {@code Filter} to select the services to be tracked.
* @param customizer The customizer object to call when services are added,
* modified, or removed in this {@code ServiceTracker}. If customizer
* is null, then this {@code ServiceTracker} will be used as the
* {@code ServiceTrackerCustomizer} and this {@code ServiceTracker}
* will call the {@code ServiceTrackerCustomizer} methods on itself.
* @since 1.1
*/
public ServiceTracker(final BundleContext context, final Filter filter, final ServiceTrackerCustomizer<S, T> customizer) {
this.context = context;
this.trackReference = null;
this.trackClass = null;
this.listenerFilter = filter.toString();
this.filter = filter;
this.customizer = (customizer == null) ? this : customizer;
if ((context == null) || (filter == null)) {
/*
* we throw a NPE here to be consistent with the other constructors
*/
throw new NullPointerException();
}
}
	/**
	 * Create a {@code ServiceTracker} on the specified class.
	 *
	 * <p>
	 * Services registered under the name of the specified class will be tracked
	 * by this {@code ServiceTracker}.
	 *
	 * @param context The {@code BundleContext} against which the tracking is
	 *        done.
	 * @param clazz The class of the services to be tracked.
	 * @param customizer The customizer object to call when services are added,
	 *        modified, or removed in this {@code ServiceTracker}. If customizer
	 *        is {@code null}, then this {@code ServiceTracker} will be used as
	 *        the {@code ServiceTrackerCustomizer} and this
	 *        {@code ServiceTracker} will call the
	 *        {@code ServiceTrackerCustomizer} methods on itself.
	 * @since 1.5
	 */
	public ServiceTracker(final BundleContext context, final Class<S> clazz, final ServiceTrackerCustomizer<S, T> customizer) {
		// Delegate to the class-name constructor; clazz.getName() throws NPE
		// on a null clazz, matching the null-hostility of the other constructors.
		this(context, clazz.getName(), customizer);
	}
	/**
	 * Open this {@code ServiceTracker} and begin tracking services.
	 *
	 * <p>
	 * This implementation calls {@code open(false)}.
	 *
	 * @throws java.lang.IllegalStateException If the {@code BundleContext} with
	 *         which this {@code ServiceTracker} was created is no longer valid.
	 * @see #open(boolean)
	 */
	public void open() {
		// false => only track services class-loader accessible to our bundle.
		open(false);
	}
	/**
	 * Open this {@code ServiceTracker} and begin tracking services.
	 *
	 * <p>
	 * Services which match the search criteria specified when this
	 * {@code ServiceTracker} was created are now tracked by this
	 * {@code ServiceTracker}.
	 *
	 * @param trackAllServices If {@code true}, then this {@code ServiceTracker}
	 *        will track all matching services regardless of class loader
	 *        accessibility. If {@code false}, then this {@code ServiceTracker}
	 *        will only track matching services which are class loader
	 *        accessible to the bundle whose {@code BundleContext} is used by
	 *        this {@code ServiceTracker}.
	 * @throws java.lang.IllegalStateException If the {@code BundleContext} with
	 *         which this {@code ServiceTracker} was created is no longer valid.
	 * @since 1.3
	 */
	public void open(boolean trackAllServices) {
		final Tracked t;
		// Lock ordering: "this" first, then the new Tracked object. The
		// customizer is never called while either lock is held.
		synchronized (this) {
			if (tracked != null) {
				return; // already open: open() is idempotent
			}
			if (DEBUG) {
				System.out.println("ServiceTracker.open: " + filter);
			}
			t = trackAllServices ? new AllTracked() : new Tracked();
			// Hold the Tracked lock while registering the listener and seeding
			// the initial references so that incoming ServiceEvents queue up
			// until the initial state is consistent.
			synchronized (t) {
				try {
					context.addServiceListener(t, listenerFilter);
					ServiceReference<S>[] references = null;
					if (trackClass != null) {
						// Tracking by class name.
						references = getInitialReferences(trackAllServices, trackClass, null);
					} else {
						if (trackReference != null) {
							// Tracking a single reference; only seed it if its
							// service is still registered (bundle non-null).
							if (trackReference.getBundle() != null) {
								@SuppressWarnings("unchecked")
								ServiceReference<S>[] single = new ServiceReference[] {trackReference};
								references = single;
							}
						} else { /* user supplied filter */
							references = getInitialReferences(trackAllServices, null, listenerFilter);
						}
					}
					/* set tracked with the initial references */
					t.setInitial(references);
				} catch (InvalidSyntaxException e) {
					// Cannot happen: listenerFilter was validated at construction.
					throw new RuntimeException("unexpected InvalidSyntaxException: " + e.getMessage(), e);
				}
			}
			tracked = t;
		}
		/* Call tracked outside of synchronized region */
		t.trackInitial(); /* process the initial references */
	}
/**
* Returns the list of initial {@code ServiceReference}s that will be
* tracked by this {@code ServiceTracker}.
*
* @param trackAllServices If {@code true}, use
* {@code getAllServiceReferences}.
* @param className The class name with which the service was registered, or
* {@code null} for all services.
* @param filterString The filter criteria or {@code null} for all services.
* @return The list of initial {@code ServiceReference}s.
* @throws InvalidSyntaxException If the specified filterString has an
* invalid syntax.
*/
private ServiceReference<S>[] getInitialReferences(boolean trackAllServices, String className, String filterString) throws InvalidSyntaxException {
@SuppressWarnings("unchecked")
ServiceReference<S>[] result = (ServiceReference<S>[]) ((trackAllServices) ? context.getAllServiceReferences(className, filterString) : context.getServiceReferences(className, filterString));
return result;
}
	/**
	 * Close this {@code ServiceTracker}.
	 *
	 * <p>
	 * This method should be called when this {@code ServiceTracker} should end
	 * the tracking of services.
	 *
	 * <p>
	 * This implementation calls {@link #getServiceReferences()} to get the list
	 * of tracked services to remove.
	 */
	public void close() {
		final Tracked outgoing;
		final ServiceReference<S>[] references;
		synchronized (this) {
			outgoing = tracked;
			if (outgoing == null) {
				// Never opened, or already closed: close() is idempotent.
				return;
			}
			if (DEBUG) {
				System.out.println("ServiceTracker.close: " + filter);
			}
			// Mark the Tracked closed BEFORE snapshotting references so late
			// ServiceEvents are ignored.
			outgoing.close();
			references = getServiceReferences();
			tracked = null;
			try {
				context.removeServiceListener(outgoing);
			} catch (IllegalStateException e) {
				/* In case the context was stopped. */
			}
		}
		modified(); /* clear the cache */
		synchronized (outgoing) {
			outgoing.notifyAll(); /* wake up any waiters */
		}
		// Untrack outside all locks so the customizer's removedService
		// callbacks run without synchronization.
		if (references != null) {
			for (int i = 0; i < references.length; i++) {
				outgoing.untrack(references[i], null);
			}
		}
		if (DEBUG) {
			if ((cachedReference == null) && (cachedService == null)) {
				System.out.println("ServiceTracker.close[cached cleared]: " + filter);
			}
		}
	}
/**
* Default implementation of the
* {@code ServiceTrackerCustomizer.addingService} method.
* <p>
* This method is only called when this {@code ServiceTracker} has been
* constructed with a {@code null ServiceTrackerCustomizer} argument.
* <p>
* This implementation returns the result of calling {@code getService}, on
* the {@code BundleContext} with which this {@code ServiceTracker} was
* created, passing the specified {@code ServiceReference}.
* <p>
* This method can be overridden in a subclass to customize the service
* object to be tracked for the service being added. In that case, take care
* not to rely on the default implementation of
* {@link #removedService(ServiceReference, Object) removedService} to unget
* the service.
*
* @param reference The reference to the service being added to this
* {@code ServiceTracker}.
* @return The service object to be tracked for the service added to this
* {@code ServiceTracker}.
* @see ServiceTrackerCustomizer#addingService(ServiceReference)
*/
@Override
public T addingService(ServiceReference<S> reference) {
@SuppressWarnings("unchecked")
T result = (T) context.getService(reference);
return result;
}
	/**
	 * Default implementation of the
	 * {@code ServiceTrackerCustomizer.modifiedService} method.
	 *
	 * <p>
	 * This method is only called when this {@code ServiceTracker} has been
	 * constructed with a {@code null ServiceTrackerCustomizer} argument.
	 *
	 * <p>
	 * This implementation does nothing.
	 *
	 * @param reference The reference to modified service.
	 * @param service The service object for the modified service.
	 * @see ServiceTrackerCustomizer#modifiedService(ServiceReference, Object)
	 */
	@Override
	public void modifiedService(ServiceReference<S> reference, T service) {
		/* do nothing */
	}
	/**
	 * Default implementation of the
	 * {@code ServiceTrackerCustomizer.removedService} method.
	 * <p>
	 * This method is only called when this {@code ServiceTracker} has been
	 * constructed with a {@code null ServiceTrackerCustomizer} argument.
	 * <p>
	 * This implementation calls {@code ungetService}, on the
	 * {@code BundleContext} with which this {@code ServiceTracker} was created,
	 * passing the specified {@code ServiceReference}.
	 * <p>
	 * This method can be overridden in a subclass. If the default
	 * implementation of {@link #addingService(ServiceReference) addingService}
	 * method was used, this method must unget the service.
	 *
	 * @param reference The reference to removed service.
	 * @param service The service object for the removed service.
	 * @see ServiceTrackerCustomizer#removedService(ServiceReference, Object)
	 */
	@Override
	public void removedService(ServiceReference<S> reference, T service) {
		// Pairs with the default addingService's context.getService call.
		context.ungetService(reference);
	}
	/**
	 * Wait for at least one service to be tracked by this
	 * {@code ServiceTracker}. This method will also return when this
	 * {@code ServiceTracker} is closed.
	 *
	 * <p>
	 * It is strongly recommended that {@code waitForService} is not used during
	 * the calling of the {@code BundleActivator} methods.
	 * {@code BundleActivator} methods are expected to complete in a short
	 * period of time.
	 *
	 * <p>
	 * This implementation calls {@link #getService()} to determine if a service
	 * is being tracked.
	 *
	 * @param timeout The time interval in milliseconds to wait. If zero, the
	 *        method will wait indefinitely.
	 * @return Returns the result of {@link #getService()}.
	 * @throws InterruptedException If another thread has interrupted the
	 *         current thread.
	 * @throws IllegalArgumentException If the value of timeout is negative.
	 */
	public T waitForService(long timeout) throws InterruptedException {
		if (timeout < 0) {
			throw new IllegalArgumentException("timeout value is negative");
		}
		// Fast path: a service is already available.
		T object = getService();
		if (object != null) {
			return object;
		}
		// Deadline in milliseconds derived from the monotonic clock;
		// 0 means "no deadline" (wait indefinitely).
		final long endTime = (timeout == 0) ? 0 : (TimeUnit.NANOSECONDS.toMillis(System.nanoTime()) + timeout);
		do {
			final Tracked t = tracked();
			if (t == null) { /* if ServiceTracker is not open */
				return null;
			}
			synchronized (t) {
				if (t.size() == 0) {
					// Released on service arrival or close(); the outer loop
					// re-checks, which also covers spurious wakeups.
					t.wait(timeout);
				}
			}
			object = getService();
			if (endTime > 0) { // if we have a timeout
				// Recompute remaining time for the next wait() call.
				timeout = endTime - TimeUnit.NANOSECONDS.toMillis(System.nanoTime());
				if (timeout <= 0) { // that has expired
					break;
				}
			}
		} while (object == null);
		return object;
	}
/**
* Return an array of {@code ServiceReference}s for all services being
* tracked by this {@code ServiceTracker}.
*
* @return Array of {@code ServiceReference}s or {@code null} if no services
* are being tracked.
*/
public ServiceReference<S>[] getServiceReferences() {
final Tracked t = tracked();
if (t == null) { /* if ServiceTracker is not open */
return null;
}
synchronized (t) {
if (t.isEmpty()) {
return null;
}
@SuppressWarnings("unchecked")
ServiceReference<S>[] result = new ServiceReference[0];
return t.copyKeys(result);
}
}
	/**
	 * Returns a {@code ServiceReference} for one of the services being tracked
	 * by this {@code ServiceTracker}.
	 *
	 * <p>
	 * If multiple services are being tracked, the service with the highest
	 * ranking (as specified in its {@code service.ranking} property) is
	 * returned. If there is a tie in ranking, the service with the lowest
	 * service id (as specified in its {@code service.id} property); that is,
	 * the service that was registered first is returned. This is the same
	 * algorithm used by {@code BundleContext.getServiceReference}.
	 *
	 * <p>
	 * This implementation calls {@link #getServiceReferences()} to get the list
	 * of references for the tracked services.
	 *
	 * @return A {@code ServiceReference} or {@code null} if no services are
	 *         being tracked.
	 * @since 1.1
	 */
	public ServiceReference<S> getServiceReference() {
		// Fast path: return the cached selection (invalidated by modified()).
		ServiceReference<S> reference = cachedReference;
		if (reference != null) {
			if (DEBUG) {
				System.out.println("ServiceTracker.getServiceReference[cached]: " + filter);
			}
			return reference;
		}
		if (DEBUG) {
			System.out.println("ServiceTracker.getServiceReference: " + filter);
		}
		ServiceReference<S>[] references = getServiceReferences();
		int length = (references == null) ? 0 : references.length;
		if (length == 0) { /* if no service is being tracked */
			return null;
		}
		int index = 0;
		if (length > 1) { /* if more than one service, select highest ranking */
			int rankings[] = new int[length];
			int count = 0;
			int maxRanking = Integer.MIN_VALUE;
			// First pass: find the highest ranking; a missing or non-Integer
			// service.ranking property counts as 0 (the spec default).
			for (int i = 0; i < length; i++) {
				Object property = references[i].getProperty(Constants.SERVICE_RANKING);
				int ranking = (property instanceof Integer) ? ((Integer) property).intValue() : 0;
				rankings[i] = ranking;
				if (ranking > maxRanking) {
					index = i;
					maxRanking = ranking;
					count = 1;
				} else {
					if (ranking == maxRanking) {
						count++;
					}
				}
			}
			if (count > 1) { /* if still more than one service, select lowest id */
				// Second pass: break ranking ties by lowest service.id,
				// i.e. the earliest registration wins.
				long minId = Long.MAX_VALUE;
				for (int i = 0; i < length; i++) {
					if (rankings[i] == maxRanking) {
						long id = ((Long) (references[i].getProperty(Constants.SERVICE_ID))).longValue();
						if (id < minId) {
							index = i;
							minId = id;
						}
					}
				}
			}
		}
		return cachedReference = references[index];
	}
/**
* Returns the service object for the specified {@code ServiceReference} if
* the specified referenced service is being tracked by this
* {@code ServiceTracker}.
*
* @param reference The reference to the desired service.
* @return A service object or {@code null} if the service referenced by the
* specified {@code ServiceReference} is not being tracked.
*/
public T getService(ServiceReference<S> reference) {
final Tracked t = tracked();
if (t == null) { /* if ServiceTracker is not open */
return null;
}
synchronized (t) {
return t.getCustomizedObject(reference);
}
}
/**
* Return an array of service objects for all services being tracked by this
* {@code ServiceTracker}.
*
* <p>
* This implementation calls {@link #getServiceReferences()} to get the list
* of references for the tracked services and then calls
* {@link #getService(ServiceReference)} for each reference to get the
* tracked service object.
*
* @return An array of service objects or {@code null} if no services are
* being tracked.
*/
public Object[] getServices() {
final Tracked t = tracked();
if (t == null) { /* if ServiceTracker is not open */
return null;
}
synchronized (t) {
ServiceReference<S>[] references = getServiceReferences();
int length = (references == null) ? 0 : references.length;
if (length == 0) {
return null;
}
Object[] objects = new Object[length];
for (int i = 0; i < length; i++) {
objects[i] = getService(references[i]);
}
return objects;
}
}
/**
* Returns a service object for one of the services being tracked by this
* {@code ServiceTracker}.
*
* <p>
* If any services are being tracked, this implementation returns the result
* of calling {@code getService(getServiceReference())}.
*
* @return A service object or {@code null} if no services are being
* tracked.
*/
public T getService() {
T service = cachedService;
if (service != null) {
if (DEBUG) {
System.out.println("ServiceTracker.getService[cached]: " + filter);
}
return service;
}
if (DEBUG) {
System.out.println("ServiceTracker.getService: " + filter);
}
ServiceReference<S> reference = getServiceReference();
if (reference == null) {
return null;
}
return cachedService = getService(reference);
}
/**
* Remove a service from this {@code ServiceTracker}.
*
* The specified service will be removed from this {@code ServiceTracker}.
* If the specified service was being tracked then the
* {@code ServiceTrackerCustomizer.removedService} method will be called for
* that service.
*
* @param reference The reference to the service to be removed.
*/
public void remove(ServiceReference<S> reference) {
final Tracked t = tracked();
if (t == null) { /* if ServiceTracker is not open */
return;
}
t.untrack(reference, null);
}
/**
* Return the number of services being tracked by this
* {@code ServiceTracker}.
*
* @return The number of services being tracked.
*/
public int size() {
final Tracked t = tracked();
if (t == null) { /* if ServiceTracker is not open */
return 0;
}
synchronized (t) {
return t.size();
}
}
/**
* Returns the tracking count for this {@code ServiceTracker}.
*
* The tracking count is initialized to 0 when this {@code ServiceTracker}
* is opened. Every time a service is added, modified or removed from this
* {@code ServiceTracker}, the tracking count is incremented.
*
* <p>
* The tracking count can be used to determine if this
* {@code ServiceTracker} has added, modified or removed a service by
* comparing a tracking count value previously collected with the current
* tracking count value. If the value has not changed, then no service has
* been added, modified or removed from this {@code ServiceTracker} since
* the previous tracking count was collected.
*
* @since 1.2
* @return The tracking count for this {@code ServiceTracker} or -1 if this
* {@code ServiceTracker} is not open.
*/
public int getTrackingCount() {
final Tracked t = tracked();
if (t == null) { /* if ServiceTracker is not open */
return -1;
}
synchronized (t) {
return t.getTrackingCount();
}
}
	/**
	 * Called by the Tracked object whenever the set of tracked services is
	 * modified. Clears the cache used by {@link #getServiceReference()} and
	 * {@link #getService()}.
	 */
	/*
	 * This method must not be synchronized since it is called by Tracked while
	 * Tracked is synchronized. We don't want synchronization interactions
	 * between the listener thread and the user thread.
	 */
	void modified() {
		cachedReference = null; /* clear cached value */
		cachedService = null; /* clear cached value */
		if (DEBUG) {
			System.out.println("ServiceTracker.modified: " + filter);
		}
	}
/**
* Return a {@code SortedMap} of the {@code ServiceReference}s and service
* objects for all services being tracked by this {@code ServiceTracker}.
* The map is sorted in reverse natural order of {@code ServiceReference}.
* That is, the first entry is the service with the highest ranking and the
* lowest service id.
*
* @return A {@code SortedMap} with the {@code ServiceReference}s and
* service objects for all services being tracked by this
* {@code ServiceTracker}. If no services are being tracked, then
* the returned map is empty.
* @since 1.5
*/
public SortedMap<ServiceReference<S>, T> getTracked() {
SortedMap<ServiceReference<S>, T> map = new TreeMap<ServiceReference<S>, T>(Collections.reverseOrder());
final Tracked t = tracked();
if (t == null) { /* if ServiceTracker is not open */
return map;
}
synchronized (t) {
return t.copyEntries(map);
}
}
/**
* Return if this {@code ServiceTracker} is empty.
*
* @return {@code true} if this {@code ServiceTracker} is not tracking any
* services.
* @since 1.5
*/
public boolean isEmpty() {
final Tracked t = tracked();
if (t == null) { /* if ServiceTracker is not open */
return true;
}
synchronized (t) {
return t.isEmpty();
}
}
	/**
	 * Return an array of service objects for all services being tracked by this
	 * {@code ServiceTracker}. The runtime type of the returned array is that of
	 * the specified array.
	 *
	 * <p>
	 * This implementation calls {@link #getServiceReferences()} to get the list
	 * of references for the tracked services and then calls
	 * {@link #getService(ServiceReference)} for each reference to get the
	 * tracked service object.
	 *
	 * @param array An array into which the tracked service objects will be
	 *        stored, if the array is large enough.
	 * @return An array of service objects being tracked. If the specified array
	 *         is large enough to hold the result, then the specified array is
	 *         returned. If the specified array is longer then necessary to hold
	 *         the result, the array element after the last service object is
	 *         set to {@code null}. If the specified array is not large enough
	 *         to hold the result, a new array is created and returned.
	 * @since 1.5
	 */
	public T[] getServices(T[] array) {
		final Tracked t = tracked();
		if (t == null) { /* if ServiceTracker is not open */
			// Same null-terminator convention as Collection.toArray(T[]).
			if (array.length > 0) {
				array[0] = null;
			}
			return array;
		}
		synchronized (t) {
			ServiceReference<S>[] references = getServiceReferences();
			int length = (references == null) ? 0 : references.length;
			if (length == 0) {
				if (array.length > 0) {
					array[0] = null;
				}
				return array;
			}
			if (length > array.length) {
				// Supplied array too small: allocate a new one of the same
				// runtime component type via reflection.
				@SuppressWarnings("unchecked")
				T[] newInstance = (T[]) Array.newInstance(array.getClass().getComponentType(), length);
				array = newInstance;
			}
			for (int i = 0; i < length; i++) {
				array[i] = getService(references[i]);
			}
			if (array.length > length) {
				// Null-terminate when the array is longer than needed.
				array[length] = null;
			}
			return array;
		}
	}
	/**
	 * Inner class which subclasses AbstractTracked. This class is the
	 * {@code ServiceListener} object for the tracker.
	 *
	 * @ThreadSafe
	 */
	private class Tracked extends AbstractTracked<ServiceReference<S>, T, ServiceEvent> implements ServiceListener {
		/**
		 * Tracked constructor.
		 */
		Tracked() {
			super();
		}
		/**
		 * {@code ServiceListener} method for the {@code ServiceTracker} class.
		 * This method must NOT be synchronized to avoid deadlock potential.
		 *
		 * @param event {@code ServiceEvent} object from the framework.
		 */
		@Override
		final public void serviceChanged(final ServiceEvent event) {
			/*
			 * Check if we had a delayed call (which could happen when we
			 * close). "closed" is presumably set by AbstractTracked when
			 * ServiceTracker.close() calls outgoing.close() -- defined in
			 * the superclass, not visible here.
			 */
			if (closed) {
				return;
			}
			@SuppressWarnings("unchecked")
			final ServiceReference<S> reference = (ServiceReference<S>) event.getServiceReference();
			if (DEBUG) {
				System.out.println("ServiceTracker.Tracked.serviceChanged[" + event.getType() + "]: " + reference);
			}
			switch (event.getType()) {
				case ServiceEvent.REGISTERED :
				case ServiceEvent.MODIFIED :
					// Add or re-customize the service.
					track(reference, event);
					/*
					 * If the customizer throws an unchecked exception, it is
					 * safe to let it propagate
					 */
					break;
				case ServiceEvent.MODIFIED_ENDMATCH :
				case ServiceEvent.UNREGISTERING :
					// Service no longer matches or is going away: drop it.
					untrack(reference, event);
					/*
					 * If the customizer throws an unchecked exception, it is
					 * safe to let it propagate
					 */
					break;
			}
		}
		/**
		 * Increment the tracking count and tell the tracker there was a
		 * modification, so the outer class invalidates its caches.
		 *
		 * @GuardedBy this
		 */
		@Override
		final void modified() {
			super.modified(); /* increment the modification count */
			ServiceTracker.this.modified();
		}
		/**
		 * Call the specific customizer adding method. This method must not be
		 * called while synchronized on this object.
		 *
		 * @param item Item to be tracked.
		 * @param related Action related object.
		 * @return Customized object for the tracked item or {@code null} if the
		 *         item is not to be tracked.
		 */
		@Override
		final T customizerAdding(final ServiceReference<S> item, final ServiceEvent related) {
			return customizer.addingService(item);
		}
		/**
		 * Call the specific customizer modified method. This method must not be
		 * called while synchronized on this object.
		 *
		 * @param item Tracked item.
		 * @param related Action related object.
		 * @param object Customized object for the tracked item.
		 */
		@Override
		final void customizerModified(final ServiceReference<S> item, final ServiceEvent related, final T object) {
			customizer.modifiedService(item, object);
		}
		/**
		 * Call the specific customizer removed method. This method must not be
		 * called while synchronized on this object.
		 *
		 * @param item Tracked item.
		 * @param related Action related object.
		 * @param object Customized object for the tracked item.
		 */
		@Override
		final void customizerRemoved(final ServiceReference<S> item, final ServiceEvent related, final T object) {
			customizer.removedService(item, object);
		}
	}
	/**
	 * Subclass of Tracked which implements the AllServiceListener interface.
	 * This class is used by the ServiceTracker if open is called with true
	 * (see {@code open(boolean)}); it adds no behavior of its own beyond the
	 * marker interface.
	 *
	 * @since 1.3
	 * @ThreadSafe
	 */
	private class AllTracked extends Tracked implements AllServiceListener {
		/**
		 * AllTracked constructor.
		 */
		AllTracked() {
			super();
		}
	}
}
|
apache/juneau | 32,399 | juneau-core/juneau-marshall/src/main/java/org/apache/juneau/json/JsonParserSession.java | // ***************************************************************************************************************************
// * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file *
// * distributed with this work for additional information regarding copyright ownership. The ASF licenses this file *
// * to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance *
// * with the License. You may obtain a copy of the License at *
// * *
// * http://www.apache.org/licenses/LICENSE-2.0 *
// * *
// * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an *
// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the *
// * specific language governing permissions and limitations under the License. *
// ***************************************************************************************************************************
package org.apache.juneau.json;
import static org.apache.juneau.common.utils.StringUtils.*;
import static org.apache.juneau.common.utils.Utils.*;
import java.io.*;
import java.lang.reflect.*;
import java.nio.charset.*;
import java.util.*;
import java.util.function.*;
import org.apache.juneau.*;
import org.apache.juneau.collections.*;
import org.apache.juneau.common.utils.*;
import org.apache.juneau.httppart.*;
import org.apache.juneau.internal.*;
import org.apache.juneau.parser.*;
import org.apache.juneau.swap.*;
/**
* Session object that lives for the duration of a single use of {@link JsonParser}.
*
* <h5 class='section'>Notes:</h5><ul>
* <li class='warn'>This class is not thread safe and is typically discarded after one use.
* </ul>
*
* <h5 class='section'>See Also:</h5><ul>
* <li class='link'><a class="doclink" href="https://juneau.apache.org/docs/topics/JsonBasics">JSON Basics</a>
* </ul>
*/
@SuppressWarnings({ "unchecked", "rawtypes" })
public class JsonParserSession extends ReaderParserSession {
//-------------------------------------------------------------------------------------------------------------------
// Static
//-------------------------------------------------------------------------------------------------------------------
private static final AsciiSet decChars = AsciiSet.create().ranges("0-9").build();
/**
 * Creates a new builder for this object.
 *
 * <p>Static factory used instead of calling the {@link Builder} constructor directly.
 *
 * @param ctx The {@link JsonParser} context creating this session.
 * @return A new builder, never <jk>null</jk>.
 */
public static Builder create(JsonParser ctx) {
    return new Builder(ctx);
}
//-------------------------------------------------------------------------------------------------------------------
// Builder
//-------------------------------------------------------------------------------------------------------------------
/**
 * Builder class.
 *
 * <p>Collects session configuration and produces an immutable
 * {@link JsonParserSession} via {@link #build()}. All fluent setters below
 * the {@code <FluentSetters>} marker are code-generated overrides that
 * narrow the return type to this builder; do not edit them by hand.
 */
@FluentSetters
public static class Builder extends ReaderParserSession.Builder {

    // Context retained so the session can read parser-level settings.
    JsonParser ctx;

    /**
     * Constructor
     *
     * @param ctx The context creating this session.
     */
    protected Builder(JsonParser ctx) {
        super(ctx);
        this.ctx = ctx;
    }

    @Override
    public JsonParserSession build() {
        return new JsonParserSession(this);
    }

    // <FluentSetters>

    @Override /* GENERATED - org.apache.juneau.ContextSession.Builder */
    public <T> Builder apply(Class<T> type, Consumer<T> apply) {
        super.apply(type, apply);
        return this;
    }

    @Override /* GENERATED - org.apache.juneau.ContextSession.Builder */
    public Builder debug(Boolean value) {
        super.debug(value);
        return this;
    }

    @Override /* GENERATED - org.apache.juneau.ContextSession.Builder */
    public Builder properties(Map<String,Object> value) {
        super.properties(value);
        return this;
    }

    @Override /* GENERATED - org.apache.juneau.ContextSession.Builder */
    public Builder property(String key, Object value) {
        super.property(key, value);
        return this;
    }

    @Override /* GENERATED - org.apache.juneau.ContextSession.Builder */
    public Builder unmodifiable() {
        super.unmodifiable();
        return this;
    }

    @Override /* GENERATED - org.apache.juneau.BeanSession.Builder */
    public Builder locale(Locale value) {
        super.locale(value);
        return this;
    }

    @Override /* GENERATED - org.apache.juneau.BeanSession.Builder */
    public Builder localeDefault(Locale value) {
        super.localeDefault(value);
        return this;
    }

    @Override /* GENERATED - org.apache.juneau.BeanSession.Builder */
    public Builder mediaType(MediaType value) {
        super.mediaType(value);
        return this;
    }

    @Override /* GENERATED - org.apache.juneau.BeanSession.Builder */
    public Builder mediaTypeDefault(MediaType value) {
        super.mediaTypeDefault(value);
        return this;
    }

    @Override /* GENERATED - org.apache.juneau.BeanSession.Builder */
    public Builder timeZone(TimeZone value) {
        super.timeZone(value);
        return this;
    }

    @Override /* GENERATED - org.apache.juneau.BeanSession.Builder */
    public Builder timeZoneDefault(TimeZone value) {
        super.timeZoneDefault(value);
        return this;
    }

    @Override /* GENERATED - org.apache.juneau.parser.ParserSession.Builder */
    public Builder javaMethod(Method value) {
        super.javaMethod(value);
        return this;
    }

    @Override /* GENERATED - org.apache.juneau.parser.ParserSession.Builder */
    public Builder outer(Object value) {
        super.outer(value);
        return this;
    }

    @Override /* GENERATED - org.apache.juneau.parser.ParserSession.Builder */
    public Builder schema(HttpPartSchema value) {
        super.schema(value);
        return this;
    }

    @Override /* GENERATED - org.apache.juneau.parser.ParserSession.Builder */
    public Builder schemaDefault(HttpPartSchema value) {
        super.schemaDefault(value);
        return this;
    }

    @Override /* GENERATED - org.apache.juneau.parser.ReaderParserSession.Builder */
    public Builder fileCharset(Charset value) {
        super.fileCharset(value);
        return this;
    }

    @Override /* GENERATED - org.apache.juneau.parser.ReaderParserSession.Builder */
    public Builder streamCharset(Charset value) {
        super.streamCharset(value);
        return this;
    }

    // </FluentSetters>
}
//-------------------------------------------------------------------------------------------------------------------
// Instance
//-------------------------------------------------------------------------------------------------------------------
private final JsonParser ctx;
/**
 * Constructor.
 *
 * <p>Use {@link #create(JsonParser)} to obtain a {@link Builder} rather than
 * instantiating this class directly.
 *
 * @param builder The builder for this object.
 */
protected JsonParserSession(Builder builder) {
    super(builder);
    ctx = builder.ctx;
}
/**
 * Returns <jk>true</jk> if the specified character is whitespace.
 *
 * <p>
 * The definition of whitespace differs between strict and lax mode:
 * strict mode recognizes only the four whitespace characters permitted by
 * the JSON grammar (space, tab, line feed, carriage return), while lax mode
 * defers to {@link Character#isWhitespace(int)}.
 *
 * @param cp The codepoint.
 * @return <jk>true</jk> if the specified character is whitespace.
 */
protected boolean isWhitespace(int cp) {
    if (! isStrict())
        return Character.isWhitespace(cp);
    switch (cp) {
        case 0x09:  // tab
        case 0x0A:  // line feed
        case 0x0D:  // carriage return
        case 0x20:  // space
            return true;
        default:
            return false;
    }
}
/**
 * Returns <jk>true</jk> if the specified character is whitespace or '/'.
 *
 * <p>
 * Whitespace classification follows {@link #isWhitespace(int)} (strict mode
 * recognizes only the four JSON whitespace characters; lax mode uses
 * {@link Character#isWhitespace(int)}). '/' is included because it may
 * begin a Javascript-style comment in lax mode.
 *
 * @param cp The codepoint.
 * @return <jk>true</jk> if the specified character is whitespace or '/'.
 */
protected boolean isCommentOrWhitespace(int cp) {
    // Delegate to isWhitespace() rather than duplicating its strict/lax
    // logic inline — keeps the two whitespace definitions from diverging.
    return cp == '/' || isWhitespace(cp);
}
@Override /* ParserSession */
protected <T> T doParse(ParserPipe pipe, ClassMeta<T> type) throws IOException, ParseException, ExecutableException {
    // Parse a single top-level value, then (if validate-end is enabled)
    // verify that only whitespace/comments remain in the input.
    try (ParserReader r = pipe.getParserReader()) {
        if (r == null)
            return null;
        T o = parseAnything(type, r, getOuter(), null);
        validateEnd(r);
        return o;
    }
}
@Override /* ReaderParserSession */
protected <K,V> Map<K,V> doParseIntoMap(ParserPipe pipe, Map<K,V> m, Type keyType, Type valueType) throws IOException, ParseException, ExecutableException {
    // Populate the caller-supplied map from a JSON object; key/value types
    // are resolved to ClassMeta before parsing begins.
    try (ParserReader r = pipe.getParserReader()) {
        m = parseIntoMap2(r, m, (ClassMeta<K>)getClassMeta(keyType), (ClassMeta<V>)getClassMeta(valueType), null);
        validateEnd(r);
        return m;
    }
}
@Override /* ReaderParserSession */
protected <E> Collection<E> doParseIntoCollection(ParserPipe pipe, Collection<E> c, Type elementType) throws IOException, ParseException, ExecutableException {
    // Populate the caller-supplied collection from a JSON array.
    try (ParserReader r = pipe.getParserReader()) {
        c = parseIntoCollection2(r, c, getClassMeta(elementType), null);
        validateEnd(r);
        return c;
    }
}
/*
 * Core recursive-descent dispatch: parses any JSON value from the reader,
 * converting it to the expected type {@code eType}.
 *
 * eType - Expected (declared) type; defaults to Object if null.
 * r     - Reader positioned at (or before) the value to parse.
 * outer - Outer POJO for constructing member classes; may be null.
 * pMeta - Bean property metadata when parsing a bean property value; may be null.
 *
 * Peeks one character to decide the JSON value kind, then routes to the
 * appropriate sub-parser based on both the character and the target type.
 */
private <T> T parseAnything(ClassMeta<?> eType, ParserReader r, Object outer, BeanPropertyMeta pMeta) throws IOException, ParseException, ExecutableException {

    if (eType == null)
        eType = object();
    // Swaps/builders change the type we actually parse into (sType) vs. the
    // type the caller expects (eType).
    ObjectSwap<T,Object> swap = (ObjectSwap<T,Object>)eType.getSwap(this);
    BuilderSwap<T,Object> builder = (BuilderSwap<T,Object>)eType.getBuilderSwap(this);
    ClassMeta<?> sType = null;
    if (builder != null)
        sType = builder.getBuilderClassMeta(this);
    else if (swap != null)
        sType = swap.getSwapClassMeta(this);
    else
        sType = eType;

    // Optional<X> is handled by parsing X and wrapping.
    if (sType.isOptional())
        return (T)Utils.opt(parseAnything(eType.getElementType(), r, outer, pMeta));

    setCurrentClass(sType);
    String wrapperAttr = getJsonClassMeta(sType).getWrapperAttr();

    Object o = null;

    skipCommentsAndSpace(r);
    if (wrapperAttr != null)
        skipWrapperAttrStart(r, wrapperAttr);
    int c = r.peek();
    if (c == -1) {
        if (isStrict())
            throw new ParseException(this, "Empty input.");
        // Let o be null.
    } else if ((c == ',' || c == '}' || c == ']')) {
        if (isStrict())
            throw new ParseException(this, "Missing value detected.");
        // Handle bug in Cognos 10.2.1 that can produce non-existent values.
        // Let o be null;
    } else if (c == 'n') {
        parseKeyword("null", r);
    } else if (sType.isObject()) {
        // Target type is Object: infer the concrete type from the JSON syntax.
        if (c == '{') {
            JsonMap m2 = new JsonMap(this);
            parseIntoMap2(r, m2, string(), object(), pMeta);
            o = cast(m2, pMeta, eType);
        } else if (c == '[') {
            o = parseIntoCollection2(r, new JsonList(this), object(), pMeta);
        } else if (c == '\'' || c == '"') {
            o = parseString(r);
            if (sType.isChar())
                o = parseCharacter(o);
        } else if (c >= '0' && c <= '9' || c == '-' || c == '.') {
            o = parseNumber(r, null);
        } else if (c == 't') {
            parseKeyword("true", r);
            o = Boolean.TRUE;
        } else {
            parseKeyword("false", r);
            o = Boolean.FALSE;
        }
    } else if (sType.isBoolean()) {
        o = parseBoolean(r);
    } else if (sType.isCharSequence()) {
        o = parseString(r);
    } else if (sType.isChar()) {
        o = parseCharacter(parseString(r));
    } else if (sType.isNumber()) {
        o = parseNumber(r, (Class<? extends Number>)sType.getInnerClass());
    } else if (sType.isMap()) {
        Map m = (sType.canCreateNewInstance(outer) ? (Map)sType.newInstance(outer) : newGenericMap(sType));
        o = parseIntoMap2(r, m, sType.getKeyType(), sType.getValueType(), pMeta);
    } else if (sType.isCollection()) {
        if (c == '{') {
            // A JSON object here may be a typed wrapper (e.g. {"_type":...}).
            JsonMap m = new JsonMap(this);
            parseIntoMap2(r, m, string(), object(), pMeta);
            o = cast(m, pMeta, eType);
        } else {
            Collection l = (sType.canCreateNewInstance(outer) ? (Collection)sType.newInstance() : new JsonList(this));
            o = parseIntoCollection2(r, l, sType, pMeta);
        }
    } else if (builder != null) {
        // Parse into the builder's bean map, then build the final POJO.
        BeanMap m = toBeanMap(builder.create(this, eType));
        o = builder.build(this, parseIntoBeanMap2(r, m).getBean(), eType);
    } else if (sType.canCreateNewBean(outer)) {
        BeanMap m = newBeanMap(outer, sType.getInnerClass());
        o = parseIntoBeanMap2(r, m).getBean();
    } else if (sType.canCreateNewInstanceFromString(outer) && (c == '\'' || c == '"')) {
        o = sType.newInstanceFromString(outer, parseString(r));
    } else if (sType.isArray() || sType.isArgs()) {
        if (c == '{') {
            JsonMap m = new JsonMap(this);
            parseIntoMap2(r, m, string(), object(), pMeta);
            o = cast(m, pMeta, eType);
        } else {
            ArrayList l = (ArrayList)parseIntoCollection2(r, list(), sType, pMeta);
            o = toArray(sType, l);
        }
    } else if (c == '{') {
        // Last-chance object handling: typed map, proxy, or error.
        Map m = new JsonMap(this);
        parseIntoMap2(r, m, sType.getKeyType(), sType.getValueType(), pMeta);
        if (m.containsKey(getBeanTypePropertyName(eType)))
            o = cast((JsonMap)m, pMeta, eType);
        else if (sType.getProxyInvocationHandler() != null)
            o = newBeanMap(outer, sType.getInnerClass()).load(m).getBean();
        else
            throw new ParseException(this, "Class ''{0}'' could not be instantiated. Reason: ''{1}''",
                sType.getInnerClass().getName(), sType.getNotABeanReason());
    } else if (sType.canCreateNewInstanceFromString(outer) && ! isStrict()) {
        o = sType.newInstanceFromString(outer, parseString(r));
    } else {
        throw new ParseException(this, "Unrecognized syntax for class type ''{0}'', starting character ''{1}''",
            sType, (char)c);
    }

    if (wrapperAttr != null)
        skipWrapperAttrEnd(r);

    // Un-swap and wire the parsed value to its parent before returning.
    if (swap != null && o != null)
        o = unswap(swap, o, eType);

    if (outer != null)
        setParent(eType, o, outer);

    return (T)o;
}
/*
 * Parses a JSON number from the reader. Quoted numbers (lax mode) are read
 * as strings first; unquoted numbers are scanned by the reader itself.
 */
private Number parseNumber(ParserReader r, Class<? extends Number> type) throws IOException, ParseException {
    int c = r.peek();
    if (c == '\'' || c == '"')
        return parseNumber(r, parseString(r), type);
    return parseNumber(r, r.parseNumberString(), type);
}
/*
 * Converts the scanned number string to a Number of the requested type.
 * In strict mode, first validates the string against the JSON number
 * grammar (which is narrower than Java's numeric literal rules).
 */
private Number parseNumber(ParserReader r, String s, Class<? extends Number> type) throws ParseException {

    // JSON has slightly different number rules from Java.
    // Strict mode enforces these different rules, lax does not.
    if (isStrict()) {

        // Lax allows blank strings to represent 0.
        // Strict does not allow blank strings.
        if (s.isEmpty())
            throw new ParseException(this, "Invalid JSON number: ''{0}''", s);

        // Need to weed out octal and hexadecimal formats: 0123,-0123,0x123,-0x123.
        // Don't weed out 0 or -0.
        boolean isNegative = false;
        char c = s.charAt(0);
        if (c == '-') {
            isNegative = true;
            c = (s.length() == 1 ? 'x' : s.charAt(1));  // 'x' forces a failure below for a lone "-".
        }

        // JSON doesn't allow '.123' and '-.123'.
        if (c == '.')
            throw new ParseException(this, "Invalid JSON number: ''{0}''", s);

        // '01' is not a valid number, but '0.1', '0e1', '0e+1' are valid.
        if (c == '0' && s.length() > (isNegative ? 2 : 1)) {
            char c2 = s.charAt((isNegative ? 2 : 1));
            if (c2 != '.' && c2 != 'e' && c2 != 'E')
                throw new ParseException(this, "Invalid JSON number: ''{0}''", s);
        }

        // JSON doesn't allow '1.' or '0.e1'.
        int i = s.indexOf('.');
        if (i != -1 && (s.length() == (i+1) || ! decChars.contains(s.charAt(i+1))))
            throw new ParseException(this, "Invalid JSON number: ''{0}''", s);

    }
    return StringUtils.parseNumber(s, type);
}
/*
 * Parses a JSON boolean. Accepts the bare keywords true/false, and in lax
 * mode also accepts quoted boolean strings (e.g. "true").
 */
private Boolean parseBoolean(ParserReader r) throws IOException, ParseException {
    switch (r.peek()) {
        case '\'':
        case '"':
            // Quoted form: parse the string and let Boolean.valueOf decide.
            return Boolean.valueOf(parseString(r));
        case 't':
            parseKeyword("true", r);
            return Boolean.TRUE;
        case 'f':
            parseKeyword("false", r);
            return Boolean.FALSE;
        default:
            throw new ParseException(this, "Unrecognized syntax. Expected boolean value, actual=''{0}''", r.read(100));
    }
}
/*
 * Parses a JSON object into the supplied map using a small state machine.
 * Keys are converted to keyType (default String); values are parsed
 * recursively via parseAnything(). Returns the populated map, or throws
 * ParseException describing the state in which parsing failed.
 */
private <K,V> Map<K,V> parseIntoMap2(ParserReader r, Map<K,V> m, ClassMeta<K> keyType,
        ClassMeta<V> valueType, BeanPropertyMeta pMeta) throws IOException, ParseException, ExecutableException {

    if (keyType == null)
        keyType = (ClassMeta<K>)string();

    // State machine states (S2 intentionally unused).
    int S0=0; // Looking for outer {
    int S1=1; // Looking for attrName start.
    int S3=3; // Found attrName end, looking for :.
    int S4=4; // Found :, looking for valStart: { [ " ' LITERAL.
    int S5=5; // Looking for , or }
    int S6=6; // Found , looking for attr start.

    skipCommentsAndSpace(r);
    int state = S0;
    String currAttr = null;
    int c = 0;
    while (c != -1) {
        c = r.read();
        if (state == S0) {
            if (c == '{')
                state = S1;
            else
                break;
        } else if (state == S1) {
            if (c == '}') {
                // Empty object.
                return m;
            } else if (isCommentOrWhitespace(c)) {
                skipCommentsAndSpace(r.unread());
            } else {
                currAttr = parseFieldName(r.unread());
                state = S3;
            }
        } else if (state == S3) {
            // Note: characters other than ':' are silently skipped here (lax).
            if (c == ':')
                state = S4;
        } else if (state == S4) {
            if (isCommentOrWhitespace(c)) {
                skipCommentsAndSpace(r.unread());
            } else {
                K key = convertAttrToType(m, currAttr, keyType);
                V value = parseAnything(valueType, r.unread(), m, pMeta);
                setName(valueType, value, key);
                m.put(key, value);
                state = S5;
            }
        } else if (state == S5) {
            if (c == ',') {
                state = S6;
            } else if (isCommentOrWhitespace(c)) {
                skipCommentsAndSpace(r.unread());
            } else if (c == '}') {
                return m;
            } else {
                break;
            }
        } else if (state == S6) {
            if (c == '}') {
                // '}' immediately after ',' — trailing comma, rejected below.
                break;
            } else if (isCommentOrWhitespace(c)) {
                skipCommentsAndSpace(r.unread());
            } else {
                currAttr = parseFieldName(r.unread());
                state = S3;
            }
        }
    }
    // Loop exited without a clean return: report what we were looking for.
    if (state == S0)
        throw new ParseException(this, "Expected '{' at beginning of JSON object.");
    if (state == S1)
        throw new ParseException(this, "Could not find attribute name on JSON object.");
    if (state == S3)
        throw new ParseException(this, "Could not find ':' following attribute name on JSON object.");
    if (state == S4)
        throw new ParseException(this, "Expected one of the following characters: {,[,',\",LITERAL.");
    if (state == S5)
        throw new ParseException(this, "Could not find '}' marking end of JSON object.");
    if (state == S6)
        throw new ParseException(this, "Unexpected '}' found in JSON object.");

    return null; // Unreachable.
}
/*
 * Parse a JSON attribute name from the reader, leaving the reader positioned
 * just past the last character of the name. Quoted names are parsed as
 * strings; unquoted (bare) names are allowed only in lax mode and are
 * limited to the characters in VALID_BARE_CHARS. A bare name "null" is
 * returned as the Java null reference.
 */
private String parseFieldName(ParserReader r) throws IOException, ParseException {
    int c = r.peek();
    if (c == '\'' || c == '"')
        return parseString(r);
    if (isStrict())
        throw new ParseException(this, "Unquoted attribute detected.");
    if (! VALID_BARE_CHARS.contains(c))
        throw new ParseException(this, "Could not find the start of the field name.");
    r.mark();
    // Consume bare-name characters until a delimiter (e.g. whitespace or ':').
    while (c != -1) {
        c = r.read();
        if (! VALID_BARE_CHARS.contains(c)) {
            r.unread();
            String s = r.getMarked().intern();
            return s.equals("null") ? null : s;
        }
    }
    throw new ParseException(this, "Could not find the end of the field name.");
}
private static final AsciiSet VALID_BARE_CHARS = AsciiSet.create().range('A','Z').range('a','z').range('0','9').chars("$_-.").build();
/*
 * Parses a JSON array into the supplied collection using a small state
 * machine. Elements are parsed recursively via parseAnything(); for
 * args-style types (method argument lists) each element uses the next
 * positional argument type. Returns the populated collection, or throws
 * ParseException describing the state in which parsing failed.
 */
private <E> Collection<E> parseIntoCollection2(ParserReader r, Collection<E> l,
        ClassMeta<?> type, BeanPropertyMeta pMeta) throws IOException, ParseException, ExecutableException {

    final int
        S0=0, // Looking for outermost [
        S1=1, // Looking for starting [ or { or " or ' or LITERAL or ]
        S2=2, // Looking for , or ]
        S3=3; // Looking for starting [ or { or " or ' or LITERAL

    int argIndex = 0;  // Position in the arg list for isArgs() types.

    int state = S0;
    int c = 0;
    while (c != -1) {
        c = r.read();
        if (state == S0) {
            if (c == '[')
                state = S1;
            else if (isCommentOrWhitespace(c))
                skipCommentsAndSpace(r.unread());
            else
                break; // Invalid character found.
        } else if (state == S1) {
            if (c == ']') {
                // Empty array.
                return l;
            } else if (isCommentOrWhitespace(c)) {
                skipCommentsAndSpace(r.unread());
            } else if (c != -1) {
                l.add((E)parseAnything(type.isArgs() ? type.getArg(argIndex++) : type.getElementType(), r.unread(), l, pMeta));
                state = S2;
            }
        } else if (state == S2) {
            if (c == ',') {
                state = S3;
            } else if (isCommentOrWhitespace(c)) {
                skipCommentsAndSpace(r.unread());
            } else if (c == ']') {
                return l;
            } else {
                break; // Invalid character found.
            }
        } else if (state == S3) {
            if (isCommentOrWhitespace(c)) {
                skipCommentsAndSpace(r.unread());
            } else if (c == ']') {
                // ']' right after ',' — trailing comma, rejected below.
                break;
            } else if (c != -1) {
                l.add((E)parseAnything(type.isArgs() ? type.getArg(argIndex++) : type.getElementType(), r.unread(), l, pMeta));
                state = S2;
            }
        }
    }
    if (state == S0)
        throw new ParseException(this, "Expected '[' at beginning of JSON array.");
    if (state == S1)
        throw new ParseException(this, "Expected one of the following characters: {,[,',\",LITERAL.");
    if (state == S2)
        throw new ParseException(this, "Expected ',' or ']'.");
    if (state == S3)
        throw new ParseException(this, "Unexpected trailing comma in array.");

    return null; // Unreachable.
}
/*
 * Parses a JSON object into the supplied bean map using a small state
 * machine. Known properties are set via their BeanPropertyMeta; unknown
 * properties are routed to onUnknownProperty(). The type property (e.g.
 * "_type") is skipped since it was consumed to select the bean class.
 * mark()/unmark() bracket the parse so error locations can be reported.
 */
private <T> BeanMap<T> parseIntoBeanMap2(ParserReader r, BeanMap<T> m) throws IOException, ParseException, ExecutableException {

    int S0=0; // Looking for outer {
    int S1=1; // Looking for attrName start.
    int S3=3; // Found attrName end, looking for :.
    int S4=4; // Found :, looking for valStart: { [ " ' LITERAL.
    int S5=5; // Looking for , or }

    int state = S0;
    String currAttr = "";
    int c = 0;
    mark();
    try {
        while (c != -1) {
            c = r.read();
            if (state == S0) {
                if (c == '{') {
                    state = S1;
                } else if (isCommentOrWhitespace(c)) {
                    skipCommentsAndSpace(r.unread());
                } else {
                    break;
                }
            } else if (state == S1) {
                if (c == '}') {
                    return m;
                } else if (isCommentOrWhitespace(c)) {
                    skipCommentsAndSpace(r.unread());
                } else {
                    r.unread();
                    mark();  // Re-mark at each attribute for accurate error positions.
                    currAttr = parseFieldName(r);
                    state = S3;
                }
            } else if (state == S3) {
                if (c == ':')
                    state = S4;
            } else if (state == S4) {
                if (isCommentOrWhitespace(c)) {
                    skipCommentsAndSpace(r.unread());
                } else {
                    if (! currAttr.equals(getBeanTypePropertyName(m.getClassMeta()))) {
                        BeanPropertyMeta pMeta = m.getPropertyMeta(currAttr);
                        setCurrentProperty(pMeta);
                        if (pMeta == null) {
                            // Unknown property: still parse the value so the
                            // reader stays in sync, then delegate handling.
                            onUnknownProperty(currAttr, m, parseAnything(object(), r.unread(), m.getBean(false), null));
                            unmark();
                        } else {
                            unmark();
                            ClassMeta<?> cm = pMeta.getClassMeta();
                            Object value = parseAnything(cm, r.unread(), m.getBean(false), pMeta);
                            setName(cm, value, currAttr);
                            try {
                                pMeta.set(m, currAttr, value);
                            } catch (BeanRuntimeException e) {
                                onBeanSetterException(pMeta, e);
                                throw e;
                            }
                        }
                        setCurrentProperty(null);
                    }
                    state = S5;
                }
            } else if (state == S5) {
                if (c == ',')
                    state = S1;
                else if (isCommentOrWhitespace(c))
                    skipCommentsAndSpace(r.unread());
                else if (c == '}') {
                    return m;
                }
            }
        }
        if (state == S0)
            throw new ParseException(this, "Expected '{' at beginning of JSON object.");
        if (state == S1)
            throw new ParseException(this, "Could not find attribute name on JSON object.");
        if (state == S3)
            throw new ParseException(this, "Could not find ':' following attribute name on JSON object.");
        if (state == S4)
            throw new ParseException(this, "Expected one of the following characters: {,[,',\",LITERAL.");
        if (state == S5)
            throw new ParseException(this, "Could not find '}' marking end of JSON object.");
    } finally {
        unmark();
    }

    return null; // Unreachable.
}
/*
 * Parses a JSON string starting at the current reader position and returns
 * its unescaped contents. In lax mode the string may be single-quoted or
 * unquoted (terminated by ',', '}', ']' or whitespace), and adjacent
 * strings joined with '+' (e.g. 'AAA' + "BBB") are automatically
 * concatenated. Escape sequences are decoded in place via r.replace().
 */
private String parseString(ParserReader r) throws IOException, ParseException {
    r.mark();
    int qc = r.read(); // The quote character being used (" or ')
    if (qc != '"' && isStrict()) {
        String msg = (
            qc == '\''
            ? "Invalid quote character \"{0}\" being used."
            : "Did not find quote character marking beginning of string. Character=\"{0}\""
        );
        throw new ParseException(this, msg, (char)qc);
    }
    final boolean isQuoted = (qc == '\'' || qc == '"');
    String s = null;
    boolean isInEscape = false;  // True when previous char was an unprocessed '\'.
    int c = 0;
    while (c != -1) {
        c = r.read();
        // Strict syntax requires that all control characters be escaped.
        if (isStrict() && c <= 0x1F)
            throw new ParseException(this, "Unescaped control character encountered: ''0x{0}''", String.format("%04X", c));
        if (isInEscape) {
            switch (c) {
                case 'n': r.replace('\n'); break;
                case 'r': r.replace('\r'); break;
                case 't': r.replace('\t'); break;
                case 'f': r.replace('\f'); break;
                case 'b': r.replace('\b'); break;
                case '\\': r.replace('\\'); break;
                case '/': r.replace('/'); break;
                case '\'': r.replace('\''); break;
                case '"': r.replace('"'); break;
                case 'u': {
                    // \ uXXXX — replace the 6-char escape with the codepoint.
                    String n = r.read(4);
                    try {
                        r.replace(Integer.parseInt(n, 16), 6);
                    } catch (NumberFormatException e) {
                        throw new ParseException(this, "Invalid Unicode escape sequence in string.");
                    }
                    break;
                }
                default:
                    throw new ParseException(this, "Invalid escape sequence in string.");
            }
            isInEscape = false;
        } else {
            if (c == '\\') {
                isInEscape = true;
                r.delete();  // Remove the backslash; replacement happens above.
            } else if (isQuoted) {
                if (c == qc) {
                    s = r.getMarked(1, -1);  // Strip surrounding quotes.
                    break;
                }
            } else {
                // Unquoted string: ends at a structural char or whitespace.
                if (c == ',' || c == '}' || c == ']' || isWhitespace(c)) {
                    s = r.getMarked(0, -1);
                    r.unread();
                    break;
                } else if (c == -1) {
                    s = r.getMarked(0, 0);
                    break;
                }
            }
        }
    }
    if (s == null)
        throw new ParseException(this, "Could not find expected end character ''{0}''.", (char)qc);

    // Look for concatenated string (i.e. whitespace followed by +).
    skipCommentsAndSpace(r);
    if (r.peek() == '+') {
        if (isStrict())
            throw new ParseException(this, "String concatenation detected.");
        r.read(); // Skip past '+', NOSONAR - Intentional.
        skipCommentsAndSpace(r);
        s += parseString(r);
    }
    return trim(s); // End of input reached.
}
/*
 * Consumes the exact keyword (true, false, or null) from the reader.
 * Throws a ParseException if the next characters don't match the keyword,
 * or if end-of-file is reached before the keyword is complete.
 */
private void parseKeyword(String keyword, ParserReader r) throws IOException, ParseException {
    String actual;
    try {
        actual = r.read(keyword.length());
    } catch (IndexOutOfBoundsException e) {
        // Fewer characters remained in the input than the keyword's length.
        throw new ParseException(this, "Unrecognized syntax. Expected=''{0}'', found end-of-file.", keyword);
    }
    if (! actual.equals(keyword))
        throw new ParseException(this, "Unrecognized syntax. Expected=''{0}'', Actual=''{1}''", keyword, actual);
}
/*
 * Advances the reader past any whitespace and (in lax mode) Javascript
 * comments, leaving it positioned at the first significant character.
 * Strict mode rejects comments outright. Returns silently at end-of-input.
 */
private void skipCommentsAndSpace(ParserReader r) throws IOException, ParseException {
    for (int c = r.read(); c != -1; c = r.read()) {
        if (isWhitespace(c))
            continue;
        if (c != '/') {
            // Significant character: push it back for the caller.
            r.unread();
            return;
        }
        if (isStrict())
            throw new ParseException(this, "Javascript comment detected.");
        skipComments(r);
    }
}
/*
 * Doesn't actually parse anything, but moves the position beyond the
 * construct "{wrapperAttr:" when the @Json(wrapperAttr) annotation is used
 * on a class. Verifies the attribute name matches the expected wrapper
 * attribute, then leaves the reader positioned at the wrapped value.
 */
private void skipWrapperAttrStart(ParserReader r, String wrapperAttr) throws IOException, ParseException {

    final int
        S0=0, // Looking for outer '{'
        S1=1, // Looking for attrName start.
        S3=3, // Found attrName end, looking for :.
        S4=4; // Found :, looking for valStart: { [ " ' LITERAL.

    int state = S0;
    String currAttr = null;
    int c = 0;
    while (c != -1) {
        c = r.read();
        if (state == S0) {
            if (c == '{')
                state = S1;
        } else if (state == S1) {
            if (isCommentOrWhitespace(c)) {
                skipCommentsAndSpace(r.unread());
            } else {
                currAttr = parseFieldName(r.unread());
                if (! currAttr.equals(wrapperAttr))
                    throw new ParseException(this,
                        "Expected to find wrapper attribute ''{0}'' but found attribute ''{1}''", wrapperAttr, currAttr);
                state = S3;
            }
        } else if (state == S3) {
            if (c == ':')
                state = S4;
        } else if (state == S4) {
            if (isCommentOrWhitespace(c)) {
                skipCommentsAndSpace(r.unread());
            } else {
                // Reader now points at the start of the wrapped value.
                r.unread();
                return;
            }
        }
    }
    if (state == S0)
        throw new ParseException(this, "Expected '{' at beginning of JSON object.");
    if (state == S1)
        throw new ParseException(this, "Could not find attribute name on JSON object.");
    if (state == S3)
        throw new ParseException(this, "Could not find ':' following attribute name on JSON object.");
    if (state == S4)
        throw new ParseException(this, "Expected one of the following characters: {,[,',\",LITERAL.");
}
/*
 * Consumes the closing '}' of a @Json(wrapperAttr) wrapper object,
 * skipping any whitespace and (lax-mode) comments before it. Any other
 * significant character is an error. Returns silently at end-of-input.
 */
private void skipWrapperAttrEnd(ParserReader r) throws ParseException, IOException {
    for (int c = r.read(); c != -1; c = r.read()) {
        if (isWhitespace(c))
            continue;
        if (c == '}')
            return;
        if (c == '/') {
            if (isStrict())
                throw new ParseException(this, "Javascript comment detected.");
            skipComments(r);
            continue;
        }
        throw new ParseException(this, "Could not find '}' at the end of JSON wrapper object.");
    }
}
/*
 * Positioned just after the opening '/', consumes the remainder of a
 * Javascript comment ("/* ... *&#47;" or "// ..."), leaving the reader at
 * the first character after the comment.
 *
 * BUG FIX: the previous implementation scanned block comments with a pair
 * of nested reads ("read a '*', then read the next char and test for '/'"),
 * so a run of stars before the terminator — e.g. "/***&#47;" or
 * "/* x **&#47;" — consumed the terminator's '*' in the inner read and then
 * skipped right past the closing '/', over-reading the input and throwing
 * "Open ended comment." on valid comments. We now track the previous
 * character so any '*' immediately followed by '/' terminates the comment.
 */
private void skipComments(ParserReader r) throws ParseException, IOException {
    int c = r.read();
    // "/* */" style comments
    if (c == '*') {
        int prev = 0;  // Previous character read; 0 = none yet.
        while ((c = r.read()) != -1) {
            if (prev == '*' && c == '/')
                return;
            prev = c;
        }
        // "//" style comments
    } else if (c == '/') {
        while (c != -1) {
            c = r.read();
            if (c == -1 || c == '\n')  // EOF legitimately ends a line comment.
                return;
        }
    }
    // Unterminated block comment, or '/' followed by neither '*' nor '/'.
    throw new ParseException(this, "Open ended comment.");
}
/*
 * Call this method after parsing completes to verify that any remaining
 * input consists only of whitespace/comments (plus an optional trailing
 * ';'). No-op when validate-end is disabled on the parser.
 */
private void validateEnd(ParserReader r) throws IOException, ParseException {
    if (isValidateEnd()) {
        skipCommentsAndSpace(r);
        int c = r.read();
        if (c != -1 && c != ';')  // "var x = {...};" expressions may end with a semicolon.
            throw new ParseException(this, "Remainder after parse: ''{0}''.", (char)c);
    }
}
//-----------------------------------------------------------------------------------------------------------------
// Properties
//-----------------------------------------------------------------------------------------------------------------
/**
 * Validate end.
 *
 * <p>Simple pass-through to the parser-level setting.
 *
 * @see JsonParser.Builder#validateEnd()
 * @return
 *  <jk>true</jk> if after parsing a POJO from the input, verifies that the remaining input in
 *  the stream consists of only comments or whitespace.
 */
protected boolean isValidateEnd() {
    return ctx.isValidateEnd();
}
//-----------------------------------------------------------------------------------------------------------------
// Extended metadata
//-----------------------------------------------------------------------------------------------------------------
/**
 * Returns the language-specific metadata on the specified class.
 *
 * <p>Delegates to the parser context, which caches the metadata per class.
 *
 * @param cm The class to return the metadata on.
 * @return The metadata.
 */
protected JsonClassMeta getJsonClassMeta(ClassMeta<?> cm) {
    return ctx.getJsonClassMeta(cm);
}
}
|
google/j2objc | 35,076 | jre_emul/android/platform/libcore/ojluni/src/main/java/java/util/ArrayDeque.java | /*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* This file is available under and governed by the GNU General Public
* License version 2 only, as published by the Free Software Foundation.
* However, the following notice accompanied the original version of this
* file:
*
* Written by Josh Bloch of Google Inc. and released to the public domain,
* as explained at http://creativecommons.org/publicdomain/zero/1.0/.
*/
package java.util;
import java.io.Serializable;
import java.util.function.Consumer;
// BEGIN android-note
// removed link to collections framework docs
// END android-note
/**
* Resizable-array implementation of the {@link Deque} interface. Array
* deques have no capacity restrictions; they grow as necessary to support
* usage. They are not thread-safe; in the absence of external
* synchronization, they do not support concurrent access by multiple threads.
* Null elements are prohibited. This class is likely to be faster than
* {@link Stack} when used as a stack, and faster than {@link LinkedList}
* when used as a queue.
*
* <p>Most {@code ArrayDeque} operations run in amortized constant time.
* Exceptions include
* {@link #remove(Object) remove},
* {@link #removeFirstOccurrence removeFirstOccurrence},
* {@link #removeLastOccurrence removeLastOccurrence},
* {@link #contains contains},
* {@link #iterator iterator.remove()},
* and the bulk operations, all of which run in linear time.
*
* <p>The iterators returned by this class's {@link #iterator() iterator}
* method are <em>fail-fast</em>: If the deque is modified at any time after
* the iterator is created, in any way except through the iterator's own
* {@code remove} method, the iterator will generally throw a {@link
* ConcurrentModificationException}. Thus, in the face of concurrent
* modification, the iterator fails quickly and cleanly, rather than risking
* arbitrary, non-deterministic behavior at an undetermined time in the
* future.
*
* <p>Note that the fail-fast behavior of an iterator cannot be guaranteed
* as it is, generally speaking, impossible to make any hard guarantees in the
* presence of unsynchronized concurrent modification. Fail-fast iterators
* throw {@code ConcurrentModificationException} on a best-effort basis.
* Therefore, it would be wrong to write a program that depended on this
* exception for its correctness: <i>the fail-fast behavior of iterators
* should be used only to detect bugs.</i>
*
* <p>This class and its iterator implement all of the
* <em>optional</em> methods of the {@link Collection} and {@link
* Iterator} interfaces.
*
* @author Josh Bloch and Doug Lea
* @since 1.6
* @param <E> the type of elements held in this deque
*/
public class ArrayDeque<E> extends AbstractCollection<E>
implements Deque<E>, Cloneable, Serializable
{
/**
 * The array in which the elements of the deque are stored.
 * The capacity of the deque is the length of this array, which is
 * always a power of two. The array is never allowed to become
 * full, except transiently within an addX method where it is
 * resized (see doubleCapacity) immediately upon becoming full,
 * thus avoiding head and tail wrapping around to equal each
 * other. We also guarantee that all array cells not holding
 * deque elements are always null.
 */
transient Object[] elements; // non-private to simplify nested class access
/**
 * The index of the element at the head of the deque (which is the
 * element that would be removed by remove() or pop()); or an
 * arbitrary number equal to tail if the deque is empty.
 */
transient int head;
/**
 * The index at which the next element would be added to the tail
 * of the deque (via addLast(E), add(E), or push(E)).
 */
transient int tail;
/**
 * The minimum capacity that we'll use for a newly created deque.
 * Must be a power of 2 so that all index arithmetic can use the
 * bitmask {@code (i +/- 1) & (elements.length - 1)} instead of
 * the modulo operator.
 */
private static final int MIN_INITIAL_CAPACITY = 8;
// ****** Array allocation and resizing utilities ******
/**
 * Allocates empty array to hold the given number of elements.
 * The chosen capacity is the smallest power of two strictly
 * greater than {@code numElements} (never equal, because the
 * array is never kept full), with a floor of
 * {@link #MIN_INITIAL_CAPACITY}.
 *
 * @param numElements the number of elements to hold
 */
private void allocateElements(int numElements) {
    int initialCapacity = MIN_INITIAL_CAPACITY;
    // Find the best power of two to hold elements.
    // Tests "<=" because arrays aren't kept full.
    if (numElements >= initialCapacity) {
        initialCapacity = numElements;
        // Smear the highest set bit into every lower bit position,
        // producing a value of the form 0...011...1, then add one
        // to get the next power of two.
        initialCapacity |= (initialCapacity >>> 1);
        initialCapacity |= (initialCapacity >>> 2);
        initialCapacity |= (initialCapacity >>> 4);
        initialCapacity |= (initialCapacity >>> 8);
        initialCapacity |= (initialCapacity >>> 16);
        initialCapacity++;
        if (initialCapacity < 0)   // Too many elements, must back off
            initialCapacity >>>= 1; // Good luck allocating 2^30 elements
    }
    elements = new Object[initialCapacity];
}
/**
 * Doubles the capacity of this deque. Call only when full, i.e.,
 * when head and tail have wrapped around to become equal.
 * After this call the elements are laid out contiguously starting
 * at index 0 of the new array.
 */
private void doubleCapacity() {
    assert head == tail;
    int p = head;
    int n = elements.length;
    int r = n - p; // number of elements to the right of p
    int newCapacity = n << 1;
    if (newCapacity < 0) // int overflow: capacity already 2^30
        throw new IllegalStateException("Sorry, deque too big");
    Object[] a = new Object[newCapacity];
    // Copy the two wrapped segments [p, n) and [0, p) into one
    // contiguous run at the front of the new array.
    System.arraycopy(elements, p, a, 0, r);
    System.arraycopy(elements, 0, a, r, p);
    // Android-added: Clear old array instance that's about to become eligible for GC.
    // This ensures that array elements can be eligible for garbage collection even
    // before the array itself is recognized as being eligible; the latter might
    // take a while in some GC implementations, if the array instance is longer lived
    // (its liveness rarely checked) than some of its contents.
    Arrays.fill(elements, null);
    elements = a;
    head = 0;
    tail = n;
}
/**
 * Constructs an empty array deque with an initial capacity
 * sufficient to hold 16 elements.
 */
public ArrayDeque() {
    // 16 is already a power of two, so no rounding is needed here.
    elements = new Object[16];
}
/**
 * Constructs an empty array deque with an initial capacity
 * sufficient to hold the specified number of elements.
 *
 * @param numElements lower bound on initial capacity of the deque
 */
public ArrayDeque(int numElements) {
    // Rounds up to the next power of two (min 8); see allocateElements.
    allocateElements(numElements);
}
/**
 * Constructs a deque containing the elements of the specified
 * collection, in the order they are returned by the collection's
 * iterator. (The first element returned by the collection's
 * iterator becomes the first element, or <i>front</i> of the
 * deque.)
 *
 * @param c the collection whose elements are to be placed into the deque
 * @throws NullPointerException if the specified collection is null
 */
public ArrayDeque(Collection<? extends E> c) {
    allocateElements(c.size());
    // addAll appends via addLast, preserving the iterator's order.
    addAll(c);
}
// The main insertion and extraction methods are addFirst,
// addLast, pollFirst, pollLast. The other methods are defined in
// terms of these.
/**
 * Inserts the specified element at the front of this deque.
 *
 * @param e the element to add
 * @throws NullPointerException if the specified element is null
 */
public void addFirst(E e) {
    if (e == null)
        throw new NullPointerException();
    // Decrement head circularly (mask works because length is a power of 2).
    elements[head = (head - 1) & (elements.length - 1)] = e;
    // head == tail means the array just became full; grow immediately so
    // that a full array is never observable outside an addX method.
    if (head == tail)
        doubleCapacity();
}
/**
 * Inserts the specified element at the end of this deque.
 *
 * <p>This method is equivalent to {@link #add}.
 *
 * @param e the element to add
 * @throws NullPointerException if the specified element is null
 */
public void addLast(E e) {
    if (e == null)
        throw new NullPointerException();
    // Store at the current tail, then advance tail circularly; grow if
    // the advance made tail collide with head (array just became full).
    elements[tail] = e;
    if ( (tail = (tail + 1) & (elements.length - 1)) == head)
        doubleCapacity();
}
/**
 * Inserts the specified element at the front of this deque.
 *
 * @param e the element to add
 * @return {@code true} (as specified by {@link Deque#offerFirst})
 * @throws NullPointerException if the specified element is null
 */
public boolean offerFirst(E e) {
    // Delegates to addFirst; capacity growth (if any) happens there.
    addFirst(e);
    return true;
}
/**
 * Inserts the specified element at the end of this deque.
 *
 * @param e the element to add
 * @return {@code true} (as specified by {@link Deque#offerLast})
 * @throws NullPointerException if the specified element is null
 */
public boolean offerLast(E e) {
    // Delegates to addLast; capacity growth (if any) happens there.
    addLast(e);
    return true;
}
/**
 * Retrieves and removes the first element of this deque.
 * Unlike {@link #pollFirst}, this method throws rather than
 * returning {@code null} when the deque is empty.
 *
 * @return the former head of this deque
 * @throws NoSuchElementException if this deque is empty
 */
public E removeFirst() {
    final E first = pollFirst();
    if (first != null)
        return first;
    // pollFirst returns null only for an empty deque (nulls are prohibited).
    throw new NoSuchElementException();
}
/**
 * Retrieves and removes the last element of this deque.
 * Unlike {@link #pollLast}, this method throws rather than
 * returning {@code null} when the deque is empty.
 *
 * @return the former tail of this deque
 * @throws NoSuchElementException if this deque is empty
 */
public E removeLast() {
    final E last = pollLast();
    if (last != null)
        return last;
    // pollLast returns null only for an empty deque (nulls are prohibited).
    throw new NoSuchElementException();
}
/**
 * Retrieves and removes the first element of this deque, or
 * returns {@code null} if this deque is empty.
 *
 * @return the former head of this deque, or {@code null} if empty
 */
public E pollFirst() {
    final Object[] es = this.elements;
    final int h = head;
    @SuppressWarnings("unchecked")
    E first = (E) es[h];
    if (first == null)
        return null; // slot at head is null only when the deque is empty
    es[h] = null;    // must null out the vacated slot for GC / invariants
    head = (h + 1) & (es.length - 1);
    return first;
}
/**
 * Retrieves and removes the last element of this deque, or
 * returns {@code null} if this deque is empty.
 *
 * @return the former tail of this deque, or {@code null} if empty
 */
public E pollLast() {
    final Object[] es = this.elements;
    final int t = (tail - 1) & (es.length - 1);
    @SuppressWarnings("unchecked")
    E last = (E) es[t];
    if (last == null)
        return null; // slot before tail is null only when the deque is empty
    es[t] = null;    // must null out the vacated slot for GC / invariants
    tail = t;
    return last;
}
/**
 * Retrieves, but does not remove, the first element of this deque.
 *
 * @return the head of this deque
 * @throws NoSuchElementException if this deque is empty
 */
public E getFirst() {
    @SuppressWarnings("unchecked")
    E first = (E) elements[head];
    if (first != null)
        return first;
    // A null head slot means the deque is empty (nulls are prohibited).
    throw new NoSuchElementException();
}
/**
 * Retrieves, but does not remove, the last element of this deque.
 *
 * @return the tail of this deque
 * @throws NoSuchElementException if this deque is empty
 */
public E getLast() {
    @SuppressWarnings("unchecked")
    E last = (E) elements[(tail - 1) & (elements.length - 1)];
    if (last != null)
        return last;
    // A null slot before tail means the deque is empty (nulls are prohibited).
    throw new NoSuchElementException();
}
@SuppressWarnings("unchecked")
public E peekFirst() {
    // elements[head] is null if deque empty; null doubles as the
    // "empty" return value since null elements are prohibited.
    return (E) elements[head];
}
@SuppressWarnings("unchecked")
public E peekLast() {
    // Slot just before tail holds the last element; null means empty.
    return (E) elements[(tail - 1) & (elements.length - 1)];
}
/**
 * Removes the first occurrence of the specified element in this
 * deque (when traversing the deque from head to tail).
 * If the deque does not contain the element, it is unchanged.
 * More formally, removes the first element {@code e} such that
 * {@code o.equals(e)} (if such an element exists).
 * Returns {@code true} if this deque contained the specified element
 * (or equivalently, if this deque changed as a result of the call).
 *
 * @param o element to be removed from this deque, if present
 * @return {@code true} if the deque contained the specified element
 */
public boolean removeFirstOccurrence(Object o) {
    if (o == null)
        return false; // null elements are never stored
    final int mask = elements.length - 1;
    int i = head;
    Object x = elements[i];
    // Walk forward from head; the first null cell marks the end of the deque.
    while (x != null) {
        if (o.equals(x)) {
            delete(i);
            return true;
        }
        i = (i + 1) & mask;
        x = elements[i];
    }
    return false;
}
/**
 * Removes the last occurrence of the specified element in this
 * deque (when traversing the deque from head to tail).
 * If the deque does not contain the element, it is unchanged.
 * More formally, removes the last element {@code e} such that
 * {@code o.equals(e)} (if such an element exists).
 * Returns {@code true} if this deque contained the specified element
 * (or equivalently, if this deque changed as a result of the call).
 *
 * @param o element to be removed from this deque, if present
 * @return {@code true} if the deque contained the specified element
 */
public boolean removeLastOccurrence(Object o) {
    if (o == null)
        return false; // null elements are never stored
    final int mask = elements.length - 1;
    int i = (tail - 1) & mask;
    Object x = elements[i];
    // Walk backward from the tail; the first null cell marks the
    // (exclusive) front of the deque.
    while (x != null) {
        if (o.equals(x)) {
            delete(i);
            return true;
        }
        i = (i - 1) & mask;
        x = elements[i];
    }
    return false;
}
// *** Queue methods ***
/**
 * Inserts the specified element at the end of this deque.
 *
 * <p>This method is equivalent to {@link #addLast}.
 *
 * @param e the element to add
 * @return {@code true} (as specified by {@link Collection#add})
 * @throws NullPointerException if the specified element is null
 */
public boolean add(E e) {
    // A deque used as a queue enqueues at the tail.
    addLast(e);
    return true;
}
/**
 * Inserts the specified element at the end of this deque.
 *
 * <p>This method is equivalent to {@link #offerLast}.
 *
 * @param e the element to add
 * @return {@code true} (as specified by {@link Queue#offer})
 * @throws NullPointerException if the specified element is null
 */
public boolean offer(E e) {
    // Always succeeds: the deque is unbounded, so offerLast never refuses.
    return offerLast(e);
}
/**
 * Retrieves and removes the head of the queue represented by this deque.
 *
 * This method differs from {@link #poll poll} only in that it throws an
 * exception if this deque is empty.
 *
 * <p>This method is equivalent to {@link #removeFirst}.
 *
 * @return the head of the queue represented by this deque
 * @throws NoSuchElementException {@inheritDoc}
 */
public E remove() {
    return removeFirst();
}
/**
 * Retrieves and removes the head of the queue represented by this deque
 * (in other words, the first element of this deque), or returns
 * {@code null} if this deque is empty.
 *
 * <p>This method is equivalent to {@link #pollFirst}.
 *
 * @return the head of the queue represented by this deque, or
 *         {@code null} if this deque is empty
 */
public E poll() {
    return pollFirst();
}
/**
 * Retrieves, but does not remove, the head of the queue represented by
 * this deque. This method differs from {@link #peek peek} only in
 * that it throws an exception if this deque is empty.
 *
 * <p>This method is equivalent to {@link #getFirst}.
 *
 * @return the head of the queue represented by this deque
 * @throws NoSuchElementException {@inheritDoc}
 */
public E element() {
    return getFirst();
}
/**
 * Retrieves, but does not remove, the head of the queue represented by
 * this deque, or returns {@code null} if this deque is empty.
 *
 * <p>This method is equivalent to {@link #peekFirst}.
 *
 * @return the head of the queue represented by this deque, or
 *         {@code null} if this deque is empty
 */
public E peek() {
    return peekFirst();
}
// *** Stack methods ***
/**
 * Pushes an element onto the stack represented by this deque. In other
 * words, inserts the element at the front of this deque.
 *
 * <p>This method is equivalent to {@link #addFirst}.
 *
 * @param e the element to push
 * @throws NullPointerException if the specified element is null
 */
public void push(E e) {
    // A deque used as a stack pushes and pops at the head.
    addFirst(e);
}
/**
 * Pops an element from the stack represented by this deque. In other
 * words, removes and returns the first element of this deque.
 *
 * <p>This method is equivalent to {@link #removeFirst()}.
 *
 * @return the element at the front of this deque (which is the top
 *         of the stack represented by this deque)
 * @throws NoSuchElementException {@inheritDoc}
 */
public E pop() {
    return removeFirst();
}
/**
 * Asserts the internal invariants of the circular buffer; no-ops
 * unless assertions are enabled. Invariants: the cell at tail is
 * always empty, the deque's occupied cells are exactly the non-null
 * cells between head (inclusive) and tail (exclusive), and the cell
 * just before head is empty.
 */
private void checkInvariants() {
    assert elements[tail] == null;
    assert head == tail ? elements[head] == null :
        (elements[head] != null &&
         elements[(tail - 1) & (elements.length - 1)] != null);
    assert elements[(head - 1) & (elements.length - 1)] == null;
}
/**
 * Removes the element at the specified position in the elements array,
 * adjusting head and tail as necessary. This can result in motion of
 * elements backwards or forwards in the array.
 *
 * <p>This method is called delete rather than remove to emphasize
 * that its semantics differ from those of {@link List#remove(int)}.
 *
 * @param i array index (not logical position) of the element to delete
 * @return true if elements moved backwards (i.e. the tail side was
 *         shifted), which tells iterators to adjust their cursor
 */
boolean delete(int i) {
    checkInvariants();
    final Object[] elements = this.elements;
    final int mask = elements.length - 1;
    final int h = head;
    final int t = tail;
    // Circular distances from head to i and from i to tail.
    final int front = (i - h) & mask;
    final int back = (t - i) & mask;
    // Invariant: head <= i < tail mod circularity
    if (front >= ((t - h) & mask))
        throw new ConcurrentModificationException();
    // Optimize for least element motion
    if (front < back) {
        // Shift the head segment forward by one to fill the gap.
        if (h <= i) {
            System.arraycopy(elements, h, elements, h + 1, front);
        } else { // Wrap around
            System.arraycopy(elements, 0, elements, 1, i);
            elements[0] = elements[mask];
            System.arraycopy(elements, h, elements, h + 1, mask - h);
        }
        elements[h] = null;
        head = (h + 1) & mask;
        return false;
    } else {
        // Shift the tail segment backward by one to fill the gap.
        if (i < t) { // Copy the null tail as well
            System.arraycopy(elements, i + 1, elements, i, back);
            tail = t - 1;
        } else { // Wrap around
            System.arraycopy(elements, i + 1, elements, i, mask - i);
            elements[mask] = elements[0];
            System.arraycopy(elements, 1, elements, 0, t);
            tail = (t - 1) & mask;
        }
        return true;
    }
}
// *** Collection Methods ***
/**
 * Returns the number of elements in this deque.
 *
 * @return the number of elements in this deque
 */
public int size() {
    // Circular distance from head to tail; correct even when tail < head.
    return (tail - head) & (elements.length - 1);
}
/**
 * Returns {@code true} if this deque contains no elements.
 *
 * @return {@code true} if this deque contains no elements
 */
public boolean isEmpty() {
    // head == tail is the canonical empty state (the array is never full).
    return head == tail;
}
/**
 * Returns an iterator over the elements in this deque. The elements
 * will be ordered from first (head) to last (tail). This is the same
 * order that elements would be dequeued (via successive calls to
 * {@link #remove} or popped (via successive calls to {@link #pop}).
 *
 * @return a (fail-fast) iterator over the elements in this deque
 */
public Iterator<E> iterator() {
    return new DeqIterator();
}
/** Returns a (fail-fast) iterator traversing from tail to head. */
public Iterator<E> descendingIterator() {
    return new DescendingIterator();
}
/**
 * Head-to-tail iterator. Fail-fast on a best-effort basis: it detects
 * structural modification by watching for a changed tail or a null
 * element (null cells never occur inside the occupied range).
 */
private class DeqIterator implements Iterator<E> {
    /**
     * Index of element to be returned by subsequent call to next.
     */
    private int cursor = head;
    /**
     * Tail recorded at construction (also in remove), to stop
     * iterator and also to check for comodification.
     */
    private int fence = tail;
    /**
     * Index of element returned by most recent call to next.
     * Reset to -1 if element is deleted by a call to remove.
     */
    private int lastRet = -1;
    public boolean hasNext() {
        return cursor != fence;
    }
    public E next() {
        if (cursor == fence)
            throw new NoSuchElementException();
        @SuppressWarnings("unchecked")
        E result = (E) elements[cursor];
        // This check doesn't catch all possible comodifications,
        // but does catch the ones that corrupt traversal
        if (tail != fence || result == null)
            throw new ConcurrentModificationException();
        lastRet = cursor;
        cursor = (cursor + 1) & (elements.length - 1);
        return result;
    }
    public void remove() {
        if (lastRet < 0)
            throw new IllegalStateException();
        if (delete(lastRet)) { // if left-shifted, undo increment in next()
            cursor = (cursor - 1) & (elements.length - 1);
            fence = tail; // re-sync fence since delete() moved tail
        }
        lastRet = -1;
    }
    @Override
    public void forEachRemaining(Consumer<? super E> action) {
        Objects.requireNonNull(action);
        Object[] a = elements;
        int m = a.length - 1, f = fence, i = cursor;
        cursor = f; // mark iterator exhausted before invoking the action
        while (i != f) {
            @SuppressWarnings("unchecked") E e = (E)a[i];
            i = (i + 1) & m;
            // Android-note: This uses a different heuristic for detecting
            // concurrent modification exceptions than next(). As such, this is a less
            // precise test.
            if (e == null)
                throw new ConcurrentModificationException();
            action.accept(e);
        }
    }
}
/**
 * This class is nearly a mirror-image of DeqIterator, using tail
 * instead of head for initial cursor, and head instead of tail
 * for fence. Note that cursor is decremented <em>before</em> the
 * element is read, because tail points one past the last element.
 */
private class DescendingIterator implements Iterator<E> {
    private int cursor = tail;  // one past the next element to return
    private int fence = head;   // stop (and comodification) marker
    private int lastRet = -1;   // index returned by the last next(), or -1
    public boolean hasNext() {
        return cursor != fence;
    }
    public E next() {
        if (cursor == fence)
            throw new NoSuchElementException();
        cursor = (cursor - 1) & (elements.length - 1);
        @SuppressWarnings("unchecked")
        E result = (E) elements[cursor];
        // Best-effort comodification check (mirrors DeqIterator.next).
        if (head != fence || result == null)
            throw new ConcurrentModificationException();
        lastRet = cursor;
        return result;
    }
    public void remove() {
        if (lastRet < 0)
            throw new IllegalStateException();
        // delete() returning false means the head side shifted forward,
        // so undo the decrement performed by next().
        if (!delete(lastRet)) {
            cursor = (cursor + 1) & (elements.length - 1);
            fence = head;
        }
        lastRet = -1;
    }
}
/**
 * Returns {@code true} if this deque contains the specified element.
 * More formally, returns {@code true} if and only if this deque contains
 * at least one element {@code e} such that {@code o.equals(e)}.
 *
 * @param o object to be checked for containment in this deque
 * @return {@code true} if this deque contains the specified element
 */
public boolean contains(Object o) {
    if (o == null)
        return false; // null elements are never stored
    final int mask = elements.length - 1;
    int i = head;
    Object x = elements[i];
    // Scan forward from head; the first null cell marks the end.
    while (x != null) {
        if (o.equals(x))
            return true;
        i = (i + 1) & mask;
        x = elements[i];
    }
    return false;
}
/**
 * Removes a single instance of the specified element from this deque.
 * If the deque does not contain the element, it is unchanged.
 * More formally, removes the first element {@code e} such that
 * {@code o.equals(e)} (if such an element exists).
 * Returns {@code true} if this deque contained the specified element
 * (or equivalently, if this deque changed as a result of the call).
 *
 * <p>This method is equivalent to {@link #removeFirstOccurrence(Object)}.
 *
 * @param o element to be removed from this deque, if present
 * @return {@code true} if this deque contained the specified element
 */
public boolean remove(Object o) {
    return removeFirstOccurrence(o);
}
/**
 * Removes all of the elements from this deque.
 * The deque will be empty after this call returns. Every occupied
 * cell is nulled out so the removed elements become eligible for GC.
 */
public void clear() {
    if (head == tail)
        return; // already empty; nothing to null out
    final int mask = elements.length - 1;
    // Null each occupied cell from head up to (but excluding) tail,
    // wrapping around the end of the array as needed.
    for (int i = head; i != tail; i = (i + 1) & mask) {
        elements[i] = null;
    }
    head = tail = 0;
}
/**
 * Returns an array containing all of the elements in this deque
 * in proper sequence (from first to last element).
 *
 * <p>The returned array will be "safe" in that no references to it are
 * maintained by this deque. (In other words, this method must allocate
 * a new array). The caller is thus free to modify the returned array.
 *
 * <p>This method acts as bridge between array-based and collection-based
 * APIs.
 *
 * @return an array containing all of the elements in this deque
 */
public Object[] toArray() {
    final int head = this.head;
    final int tail = this.tail;
    boolean wrap = (tail < head);
    // When wrapped, copyOfRange's padded tail leaves room for the
    // second segment [0, tail), copied in below.
    int end = wrap ? tail + elements.length : tail;
    Object[] a = Arrays.copyOfRange(elements, head, end);
    if (wrap)
        System.arraycopy(elements, 0, a, elements.length - head, tail);
    return a;
}
/**
 * Returns an array containing all of the elements in this deque in
 * proper sequence (from first to last element); the runtime type of the
 * returned array is that of the specified array. If the deque fits in
 * the specified array, it is returned therein. Otherwise, a new array
 * is allocated with the runtime type of the specified array and the
 * size of this deque.
 *
 * <p>If this deque fits in the specified array with room to spare
 * (i.e., the array has more elements than this deque), the element in
 * the array immediately following the end of the deque is set to
 * {@code null}.
 *
 * <p>Like the {@link #toArray()} method, this method acts as bridge between
 * array-based and collection-based APIs. Further, this method allows
 * precise control over the runtime type of the output array, and may,
 * under certain circumstances, be used to save allocation costs.
 *
 * <p>Suppose {@code x} is a deque known to contain only strings.
 * The following code can be used to dump the deque into a newly
 * allocated array of {@code String}:
 *
 * <pre> {@code String[] y = x.toArray(new String[0]);}</pre>
 *
 * Note that {@code toArray(new Object[0])} is identical in function to
 * {@code toArray()}.
 *
 * @param a the array into which the elements of the deque are to
 *          be stored, if it is big enough; otherwise, a new array of the
 *          same runtime type is allocated for this purpose
 * @return an array containing all of the elements in this deque
 * @throws ArrayStoreException if the runtime type of the specified array
 *         is not a supertype of the runtime type of every element in
 *         this deque
 * @throws NullPointerException if the specified array is null
 */
@SuppressWarnings("unchecked")
public <T> T[] toArray(T[] a) {
    final int head = this.head;
    final int tail = this.tail;
    boolean wrap = (tail < head);
    int size = (tail - head) + (wrap ? elements.length : 0);
    // firstLeg = number of elements from head to the end of the array
    // (or all elements, if the deque does not wrap).
    int firstLeg = size - (wrap ? tail : 0);
    int len = a.length;
    if (size > len) {
        // Destination too small: allocate a new array of a's runtime type;
        // the padded range leaves room for the wrapped segment below.
        a = (T[]) Arrays.copyOfRange(elements, head, head + size,
                                     a.getClass());
    } else {
        System.arraycopy(elements, head, a, 0, firstLeg);
        if (size < len)
            a[size] = null; // per Collection.toArray contract
    }
    if (wrap)
        System.arraycopy(elements, 0, a, firstLeg, tail);
    return a;
}
// *** Object methods ***
/**
 * Returns a copy of this deque. The copy is shallow: the element
 * array is duplicated, but the elements themselves are shared.
 *
 * @return a copy of this deque
 */
public ArrayDeque<E> clone() {
    try {
        @SuppressWarnings("unchecked")
        ArrayDeque<E> result = (ArrayDeque<E>) super.clone();
        // super.clone() copies head/tail; replace the shared array.
        result.elements = Arrays.copyOf(elements, elements.length);
        return result;
    } catch (CloneNotSupportedException e) {
        // Cannot happen: this class implements Cloneable.
        throw new AssertionError();
    }
}
private static final long serialVersionUID = 2340985798034038923L;
/**
 * Saves this deque to a stream (that is, serializes it).
 *
 * @param s the stream
 * @throws java.io.IOException if an I/O error occurs
 * @serialData The current size ({@code int}) of the deque,
 * followed by all of its elements (each an object reference) in
 * first-to-last order.
 */
private void writeObject(java.io.ObjectOutputStream s)
        throws java.io.IOException {
    s.defaultWriteObject();
    // Write out size
    s.writeInt(size());
    // Write out elements in order, walking the circular buffer from
    // head to tail so deserialization can lay them out linearly.
    int mask = elements.length - 1;
    for (int i = head; i != tail; i = (i + 1) & mask)
        s.writeObject(elements[i]);
}
/**
 * Reconstitutes this deque from a stream (that is, deserializes it).
 * Elements were written in first-to-last order, so they are stored
 * linearly starting at index 0 (head = 0, tail = size).
 *
 * @param s the stream
 * @throws ClassNotFoundException if the class of a serialized object
 *         could not be found
 * @throws java.io.IOException if an I/O error occurs
 */
private void readObject(java.io.ObjectInputStream s)
        throws java.io.IOException, ClassNotFoundException {
    s.defaultReadObject();
    // Read in size and allocate array
    int size = s.readInt();
    allocateElements(size);
    head = 0;
    tail = size;
    // Read in all elements in the proper order.
    for (int i = 0; i < size; i++)
        elements[i] = s.readObject();
}
/**
 * Creates a <em><a href="Spliterator.html#binding">late-binding</a></em>
 * and <em>fail-fast</em> {@link Spliterator} over the elements in this
 * deque.
 *
 * <p>The {@code Spliterator} reports {@link Spliterator#SIZED},
 * {@link Spliterator#SUBSIZED}, {@link Spliterator#ORDERED}, and
 * {@link Spliterator#NONNULL}. Overriding implementations should document
 * the reporting of additional characteristic values.
 *
 * @return a {@code Spliterator} over the elements in this deque
 * @since 1.8
 */
public Spliterator<E> spliterator() {
    // -1 sentinels defer binding to head/tail until first use.
    return new DeqSpliterator<>(this, -1, -1);
}
/**
 * Late-binding spliterator over an ArrayDeque. The covered range is
 * [index, fence) in circular-buffer coordinates; fence == -1 means the
 * spliterator has not yet bound to the deque's head/tail.
 */
static final class DeqSpliterator<E> implements Spliterator<E> {
    private final ArrayDeque<E> deq;
    private int fence;  // -1 until first use
    private int index;  // current index, modified on traverse/split
    /** Creates new spliterator covering the given array and range. */
    DeqSpliterator(ArrayDeque<E> deq, int origin, int fence) {
        this.deq = deq;
        this.index = origin;
        this.fence = fence;
    }
    private int getFence() { // force initialization
        int t;
        if ((t = fence) < 0) {
            // First use: bind to the deque's current bounds (late binding).
            t = fence = deq.tail;
            index = deq.head;
        }
        return t;
    }
    public DeqSpliterator<E> trySplit() {
        int t = getFence(), h = index, n = deq.elements.length;
        // Split only if at least two elements remain.
        if (h != t && ((h + 1) & (n - 1)) != t) {
            if (h > t) // unwrap so the midpoint computation is linear
                t += n;
            int m = ((h + t) >>> 1) & (n - 1); // circular midpoint
            // Prefix [h, m) goes to the new spliterator; this keeps [m, t).
            return new DeqSpliterator<E>(deq, h, index = m);
        }
        return null;
    }
    public void forEachRemaining(Consumer<? super E> consumer) {
        if (consumer == null)
            throw new NullPointerException();
        Object[] a = deq.elements;
        int m = a.length - 1, f = getFence(), i = index;
        index = f; // mark exhausted before invoking the consumer
        while (i != f) {
            @SuppressWarnings("unchecked") E e = (E)a[i];
            i = (i + 1) & m;
            // A null inside the covered range implies concurrent modification.
            if (e == null)
                throw new ConcurrentModificationException();
            consumer.accept(e);
        }
    }
    public boolean tryAdvance(Consumer<? super E> consumer) {
        if (consumer == null)
            throw new NullPointerException();
        Object[] a = deq.elements;
        int m = a.length - 1, f = getFence(), i = index;
        if (i != f) {
            @SuppressWarnings("unchecked") E e = (E)a[i];
            index = (i + 1) & m;
            if (e == null)
                throw new ConcurrentModificationException();
            consumer.accept(e);
            return true;
        }
        return false;
    }
    public long estimateSize() {
        int n = getFence() - index;
        if (n < 0) // covered range wraps around the end of the array
            n += deq.elements.length;
        return (long) n;
    }
    @Override
    public int characteristics() {
        return Spliterator.ORDERED | Spliterator.SIZED |
            Spliterator.NONNULL | Spliterator.SUBSIZED;
    }
}
}
|
apache/grails-core | 35,046 | grails-datastore-core/src/main/groovy/org/grails/datastore/mapping/core/AbstractSession.java | /* Copyright (C) 2010-2025 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.grails.datastore.mapping.core;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import jakarta.persistence.FlushModeType;
import com.github.benmanes.caffeine.cache.Caffeine;
import com.github.benmanes.caffeine.cache.RemovalListener;
import org.springframework.beans.BeanWrapper;
import org.springframework.beans.BeanWrapperImpl;
import org.springframework.context.ApplicationEventPublisher;
import org.springframework.core.convert.ConversionFailedException;
import org.springframework.core.convert.ConversionService;
import org.springframework.dao.DataAccessResourceFailureException;
import org.springframework.dao.InvalidDataAccessResourceUsageException;
import org.springframework.transaction.NoTransactionException;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.support.DefaultTransactionDefinition;
import org.springframework.util.Assert;
import org.grails.datastore.mapping.cache.TPCacheAdapterRepository;
import org.grails.datastore.mapping.config.Entity;
import org.grails.datastore.mapping.core.impl.PendingDelete;
import org.grails.datastore.mapping.core.impl.PendingInsert;
import org.grails.datastore.mapping.core.impl.PendingOperation;
import org.grails.datastore.mapping.core.impl.PendingOperationExecution;
import org.grails.datastore.mapping.core.impl.PendingUpdate;
import org.grails.datastore.mapping.dirty.checking.DirtyCheckable;
import org.grails.datastore.mapping.dirty.checking.DirtyCheckingSupport;
import org.grails.datastore.mapping.engine.EntityAccess;
import org.grails.datastore.mapping.engine.EntityPersister;
import org.grails.datastore.mapping.engine.NativeEntryEntityPersister;
import org.grails.datastore.mapping.engine.NonPersistentTypeException;
import org.grails.datastore.mapping.engine.Persister;
import org.grails.datastore.mapping.model.MappingContext;
import org.grails.datastore.mapping.model.PersistentEntity;
import org.grails.datastore.mapping.model.PersistentProperty;
import org.grails.datastore.mapping.query.Query;
import org.grails.datastore.mapping.query.api.QueryableCriteria;
import org.grails.datastore.mapping.transactions.Transaction;
/**
* Abstract implementation of the {@link org.grails.datastore.mapping.core.Session} interface that uses
* a list of {@link org.grails.datastore.mapping.engine.Persister} instances
* to save, update and delete instances
*
* @param <N>
* @author Graeme Rocher
* @since 1.0
*/
@SuppressWarnings({"rawtypes", "unchecked"})
public abstract class AbstractSession<N> extends AbstractAttributeStoringSession implements SessionImplementor {
// Attribute key under which a cached EntityAccess is stored on an entity.
public static final String ENTITY_ACCESS = "org.grails.gorm.ENTITY_ACCESS";
// Caffeine eviction listeners: the pending-operation maps are bounded at
// 5000 entries, and rather than silently dropping queued operations on
// eviction, these listeners fail fast with a descriptive exception telling
// the user to flush() more often.
private static final RemovalListener<PersistentEntity, Collection<PendingInsert>> EXCEPTION_THROWING_INSERT_LISTENER =
    (key, value, cause) -> {
        if (cause.wasEvicted()) {
            throw new DataAccessResourceFailureException("Maximum number (5000) of insert operations to flush() exceeded. Flush the session periodically to avoid this error for batch operations.");
        }
    };
private static final RemovalListener<PersistentEntity, Collection<PendingUpdate>> EXCEPTION_THROWING_UPDATE_LISTENER =
    (key, value, cause) -> {
        if (cause.wasEvicted()) {
            throw new DataAccessResourceFailureException("Maximum number (5000) of update operations to flush() exceeded. Flush the session periodically to avoid this error for batch operations.");
        }
    };
private static final RemovalListener<PersistentEntity, Collection<PendingDelete>> EXCEPTION_THROWING_DELETE_LISTENER =
    (key, value, cause) -> {
        if (cause.wasEvicted()) {
            throw new DataAccessResourceFailureException("Maximum number (5000) of delete operations to flush() exceeded. Flush the session periodically to avoid this error for batch operations.");
        }
    };
private static final String NULL = "null";
// Persister instances keyed by entity class, created lazily.
protected Map<Class, Persister> persisters = new ConcurrentHashMap<>();
protected boolean isSynchronizedWithTransaction = false;
private MappingContext mappingContext;
// Objects pessimistically locked by this session.
protected ConcurrentLinkedQueue lockedObjects = new ConcurrentLinkedQueue();
protected Transaction transaction;
private Datastore datastore;
private FlushModeType flushMode = FlushModeType.AUTO;
// First-level (session-scoped) caches, keyed by entity class then identifier.
protected Map<Class, Map<Serializable, Object>> firstLevelCache = new ConcurrentHashMap<>();
protected Map<Class, Map<Serializable, Object>> firstLevelEntryCache = new ConcurrentHashMap<>();
protected Map<Class, Map<Serializable, Object>> firstLevelEntryCacheDirtyCheck = new ConcurrentHashMap<>();
protected Map<CollectionKey, Collection> firstLevelCollectionCache = new ConcurrentHashMap<>();
protected TPCacheAdapterRepository cacheAdapterRepository;
// Identifiers (or identity hash codes, for unsaved objects) of instances
// that already have a pending insert/update/delete queued.
private Collection<Serializable> objectsPendingOperations = new ConcurrentLinkedQueue<>();
// Pending operations queued per entity until the next flush(); bounded
// Caffeine maps (see listeners above). Runnable::run keeps eviction
// callbacks on the caller thread so the exception propagates to the user.
private Map<PersistentEntity, Collection<PendingInsert>> pendingInserts =
    Caffeine.newBuilder()
        .removalListener(EXCEPTION_THROWING_INSERT_LISTENER)
        .executor(Runnable::run)
        .maximumSize(5000).build().asMap();
private Map<PersistentEntity, Collection<PendingUpdate>> pendingUpdates =
    Caffeine.newBuilder()
        .removalListener(EXCEPTION_THROWING_UPDATE_LISTENER)
        .executor(Runnable::run)
        .maximumSize(5000).build().asMap();
private Map<PersistentEntity, Collection<PendingDelete>> pendingDeletes =
    Caffeine.newBuilder()
        .removalListener(EXCEPTION_THROWING_DELETE_LISTENER)
        .executor(Runnable::run)
        .maximumSize(5000).build().asMap();
// Callbacks to run after the next flush completes.
protected Collection<Runnable> postFlushOperations = new ConcurrentLinkedQueue<>();
private boolean exceptionOccurred;
protected ApplicationEventPublisher publisher;
// Stateless sessions skip first-level caching.
protected boolean stateless = false;
protected boolean flushActive = false;
/**
 * Creates a stateful session for the given datastore.
 *
 * @param datastore the owning datastore
 * @param mappingContext mapping context describing the persistent entities
 * @param publisher publisher used to fire persistence events
 */
public AbstractSession(Datastore datastore, MappingContext mappingContext,
ApplicationEventPublisher publisher) {
    this(datastore, mappingContext, publisher, false);
}
/**
 * Creates a session for the given datastore.
 *
 * @param datastore the owning datastore
 * @param mappingContext mapping context describing the persistent entities
 * @param publisher publisher used to fire persistence events
 * @param stateless whether the session is stateless (no first-level cache)
 */
public AbstractSession(Datastore datastore, MappingContext mappingContext,
ApplicationEventPublisher publisher, boolean stateless) {
    this.mappingContext = mappingContext;
    this.datastore = datastore;
    this.publisher = publisher;
    this.stateless = stateless;
}
public AbstractSession(Datastore datastore, MappingContext mappingContext,
ApplicationEventPublisher publisher, TPCacheAdapterRepository cacheAdapterRepository) {
this(datastore, mappingContext, publisher, false);
this.cacheAdapterRepository = cacheAdapterRepository;
}
public AbstractSession(Datastore datastore, MappingContext mappingContext,
ApplicationEventPublisher publisher, TPCacheAdapterRepository cacheAdapterRepository, boolean stateless) {
this(datastore, mappingContext, publisher, stateless);
this.cacheAdapterRepository = cacheAdapterRepository;
}
@Override
public boolean isSchemaless() {
return this.datastore.isSchemaless();
}
@Override
public boolean isStateless() {
return this.stateless;
}
public void addPostFlushOperation(Runnable runnable) {
if (runnable != null && !postFlushOperations.contains(runnable)) {
postFlushOperations.add(runnable);
}
}
/**
 * Queues a {@link PendingInsert} for execution at the next flush and registers
 * its target object as having a pending operation.
 *
 * @param insert the pending insert; its entity keys the per-entity queue
 */
public void addPendingInsert(PendingInsert insert) {
    final Object o = insert.getObject();
    if (o != null) {
        registerPending(o);
    }
    // computeIfAbsent is atomic on the Caffeine-backed ConcurrentMap; the
    // original get/null-check/put sequence could let two racing threads each
    // install a fresh queue, silently dropping one thread's pending insert.
    pendingInserts
        .computeIfAbsent(insert.getEntity(), entity -> new ConcurrentLinkedQueue<>())
        .add(insert);
}
@Override
public boolean isPendingAlready(Object obj) {
// Keyed by the persistent identifier when one is assigned, otherwise by
// identity hash code (transient instances have no id yet).
Serializable id = getPersister(obj).getObjectIdentifier(obj);
if (id != null) {
return objectsPendingOperations.contains(id);
} else {
return objectsPendingOperations.contains(System.identityHashCode(obj));
}
}
@Override
public void registerPending(Object obj) {
// Marks the object as having a pending insert/update/delete, using the same
// id-or-identity-hash keying as isPendingAlready so the two stay in sync.
// NOTE(review): the contains/add pair is not atomic and contains() is O(n)
// on a ConcurrentLinkedQueue, so duplicates can slip in under contention.
if (obj != null) {
Serializable id = getPersister(obj).getObjectIdentifier(obj);
if (id != null) {
if (!objectsPendingOperations.contains(id)) {
objectsPendingOperations.add(id);
}
} else {
// Autoboxed Integer satisfies the Serializable element type.
final int identityHashCode = System.identityHashCode(obj);
if (!objectsPendingOperations.contains(identityHashCode)) {
objectsPendingOperations.add(identityHashCode);
}
}
}
}
/**
 * Queues a {@link PendingUpdate} for execution at the next flush.
 *
 * @param update the pending update; its entity keys the per-entity queue
 */
public void addPendingUpdate(PendingUpdate update) {
    final Object o = update.getObject();
    if (o != null) {
        registerPending(o);
    }
    // Atomic queue creation — the original get/null-check/put sequence raced
    // under concurrency. Also renames the local the original misleadingly
    // called "inserts".
    pendingUpdates
        .computeIfAbsent(update.getEntity(), entity -> new ConcurrentLinkedQueue<>())
        .add(update);
}
/**
 * Queues a {@link PendingDelete} for execution at the next flush.
 *
 * @param delete the pending delete; its entity keys the per-entity queue
 */
public void addPendingDelete(PendingDelete delete) {
    final Object o = delete.getObject();
    if (o != null) {
        registerPending(o);
    }
    // computeIfAbsent closes the get/null-check/put race present in the
    // original (two threads could each install a queue; one set of deletes
    // would be lost).
    pendingDeletes
        .computeIfAbsent(delete.getEntity(), entity -> new ConcurrentLinkedQueue<>())
        .add(delete);
}
// Returns the cached native entry for the entity/key, or null when the
// session (or the entity's mapping) is stateless or nothing is cached.
public Object getCachedEntry(PersistentEntity entity, Serializable key) {
if (isStateless(entity)) return null;
return getCachedEntry(entity, key, false);
}
// forDirtyCheck selects the dirty-check snapshot cache rather than the live
// entry cache; both are populated together by cacheEntry below.
public Object getCachedEntry(PersistentEntity entity, Serializable key, boolean forDirtyCheck) {
if (isStateless(entity)) return null;
if (key == null) {
return null;
}
return getEntryCache(entity.getJavaClass(), forDirtyCheck).get(key);
}
// Stores a native entry in BOTH the dirty-check snapshot cache and the live
// entry cache so later isDirty() comparisons have a baseline.
public void cacheEntry(PersistentEntity entity, Serializable key, Object entry) {
if (isStateless(entity)) return;
if (key == null || entry == null) {
return;
}
cacheEntry(key, entry, getEntryCache(entity.getJavaClass(), true), true);
cacheEntry(key, entry, getEntryCache(entity.getJavaClass(), false), false);
}
// A session is stateless for an entity when the whole session is stateless
// or the entity's mapping is declared stateless.
public boolean isStateless(PersistentEntity entity) {
Entity mappedForm = entity != null ? entity.getMapping().getMappedForm() : null;
return isStateless() || (mappedForm != null && mappedForm.isStateless());
}
// Hook point for subclasses; forDirtyCheck is unused here but lets overrides
// treat the two caches differently.
protected void cacheEntry(Serializable key, Object entry, Map<Serializable, Object> entryCache, boolean forDirtyCheck) {
if (isStateless()) return;
entryCache.put(key, entry);
}
/**
 * Looks up a cached collection for the given owner key and property name.
 * Returns null when absent, when any argument is null, or when the entity is
 * stateless.
 */
public Collection getCachedCollection(PersistentEntity entity, Serializable key, String name) {
    if (isStateless(entity) || key == null || name == null) {
        return null;
    }
    CollectionKey cacheKey = new CollectionKey(entity.getJavaClass(), key, name);
    return firstLevelCollectionCache.get(cacheKey);
}
/**
 * Caches a loaded collection under (owner class, owner key, property name).
 * No-op when the entity is stateless or any argument is null.
 */
public void cacheCollection(PersistentEntity entity, Serializable key, Collection collection, String name) {
    if (isStateless(entity) || key == null || collection == null || name == null) {
        return;
    }
    CollectionKey cacheKey = new CollectionKey(entity.getJavaClass(), key, name);
    firstLevelCollectionCache.put(cacheKey, collection);
}
// Live view of the per-entity pending insert queues (drained by flush()).
public Map<PersistentEntity, Collection<PendingInsert>> getPendingInserts() {
return pendingInserts;
}
// Live view of the per-entity pending update queues.
public Map<PersistentEntity, Collection<PendingUpdate>> getPendingUpdates() {
return pendingUpdates;
}
// Live view of the per-entity pending delete queues.
public Map<PersistentEntity, Collection<PendingDelete>> getPendingDeletes() {
return pendingDeletes;
}
// Current flush mode; flushPendingOperations switches this to COMMIT on error.
public FlushModeType getFlushMode() {
return flushMode;
}
public void setFlushMode(FlushModeType flushMode) {
this.flushMode = flushMode;
}
public Datastore getDatastore() {
return datastore;
}
public MappingContext getMappingContext() {
return mappingContext;
}
// Executes all pending inserts, updates and deletes, then any post-flush
// operations. Re-entrant calls are ignored via flushActive; a prior operation
// failure (exceptionOccurred) poisons the session until clear() is called.
public void flush() {
if (flushActive) return;
boolean hasInserts;
try {
if (exceptionOccurred) {
throw new InvalidDataAccessResourceUsageException(
"Do not flush() the Session after an exception occurs");
}
flushActive = true;
// Despite the name, hasInserts is true when ANY pending work exists.
hasInserts = hasUpdates();
if (hasInserts) {
// Order matters: inserts, then updates, then deletes.
flushPendingInserts(pendingInserts);
flushPendingUpdates(pendingUpdates);
flushPendingDeletes(pendingDeletes);
// Cached collections may be stale after the writes above.
firstLevelCollectionCache.clear();
executePendings(postFlushOperations);
}
} finally {
clearPendingOperations();
flushActive = false;
}
postFlush(hasInserts);
}
// Runs every queued delete, one entity's queue at a time.
protected void flushPendingDeletes(Map<PersistentEntity, Collection<PendingDelete>> pendingDeletes) {
final Collection<Collection<PendingDelete>> deletes = pendingDeletes.values();
for (Collection<PendingDelete> delete : deletes) {
flushPendingOperations(delete);
}
}
// Determines whether a persistent instance has unsaved changes.
public boolean isDirty(Object instance) {
if (instance == null) {
return false;
}
EntityPersister persister = (EntityPersister) getPersister(instance);
if (persister == null) {
// Not a known persistent type.
return false;
}
if (instance instanceof DirtyCheckable) {
// Instrumented instances track their own changes; associations are
// checked separately.
return ((DirtyCheckable) instance).hasChanged() || DirtyCheckingSupport.areAssociationsDirty(this, persister.getPersistentEntity(), instance);
}
if (!(persister instanceof NativeEntryEntityPersister)) {
// Only native-entry persisters can diff against a cached entry below.
return false;
}
Serializable id = persister.getObjectIdentifier(instance);
if (id == null) {
// not persistent
return false;
}
Object entry = getCachedEntry(persister.getPersistentEntity(), id, false);
Object instance2 = getCachedInstance(instance.getClass(), id);
// Dirty when a DIFFERENT instance is cached for the same id, or when the
// persister detects a diff against the cached native entry.
return instance != instance2 || ((NativeEntryEntityPersister) persister).isDirty(instance, entry);
}
@Override
public Serializable getObjectIdentifier(Object instance) {
// Null when the type is not persistent; the persister may also return null
// for transient (not-yet-saved) instances.
Persister persister = getPersister(instance);
if (persister != null) {
return persister.getObjectIdentifier(instance);
}
return null;
}
/**
 * Flushes every queued update operation, entity by entity. The default
 * implementation executes operations one at a time, which may be suboptimal
 * for stores with batch update support; such subclasses should override this.
 *
 * @param updates the per-entity queues of pending updates
 */
protected void flushPendingUpdates(Map<PersistentEntity, Collection<PendingUpdate>> updates) {
    updates.values().forEach(this::flushPendingOperations);
}
/**
 * Flushes every queued insert operation, entity by entity. The default
 * implementation executes operations one at a time, which may be suboptimal
 * for stores with batch insert support; such subclasses should override this.
 *
 * @param inserts the per-entity queues of pending inserts
 */
protected void flushPendingInserts(Map<PersistentEntity, Collection<PendingInsert>> inserts) {
    inserts.values().forEach(this::flushPendingOperations);
}
// Executes each queued operation in order. On failure the session is switched
// to COMMIT flush mode and poisoned (exceptionOccurred) so later flush()
// calls fail fast; the original exception is rethrown.
private void flushPendingOperations(Collection operations) {
for (Object o : operations) {
PendingOperation pendingOperation = (PendingOperation) o;
try {
PendingOperationExecution.executePendingOperation(pendingOperation);
} catch (RuntimeException e) {
setFlushMode(FlushModeType.COMMIT);
exceptionOccurred = true;
throw e;
}
}
}
// True when any insert, update, delete or post-flush operation is queued.
private boolean hasUpdates() {
return !pendingInserts.isEmpty() || !pendingUpdates.isEmpty() || !pendingDeletes.isEmpty() || !postFlushOperations.isEmpty();
}
// Template hook invoked after every flush(); hasUpdates reports whether any
// pending work was actually executed.
protected void postFlush(boolean hasUpdates) {
// do nothing
}
// Runs the post-flush runnables with the same poison-on-failure semantics as
// flushPendingOperations.
protected void executePendings(Collection<? extends Runnable> pendings) {
try {
for (Runnable pending : pendings) {
pending.run();
}
} catch (RuntimeException e) {
setFlushMode(FlushModeType.COMMIT);
exceptionOccurred = true;
throw e;
}
}
// Empties every first-level cache, all pending operations and session
// attributes, and resets the poisoned-session flag.
public void clear() {
clearMaps(firstLevelCache);
clearMaps(firstLevelEntryCache);
clearMaps(firstLevelEntryCacheDirtyCheck);
firstLevelCollectionCache.clear();
clearPendingOperations();
attributes.clear();
exceptionOccurred = false;
}
// Drops all queued inserts/updates/deletes and post-flush hooks; called from
// both clear() and the finally block of flush().
protected void clearPendingOperations() {
objectsPendingOperations.clear();
pendingInserts.clear();
pendingUpdates.clear();
pendingDeletes.clear();
postFlushOperations.clear();
}
/** Clears each per-class inner cache map while keeping the outer map's structure intact. */
private void clearMaps(Map<Class, Map<Serializable, Object>> mapOfMaps) {
    mapOfMaps.values().forEach(Map::clear);
}
// Returns (creating and caching on first use) the Persister for an argument
// that may be an instance, a Class, or a PersistentEntity.
public final Persister getPersister(Object o) {
if (o == null) return null;
Class cls;
if (o instanceof Class) {
cls = (Class) o;
} else if (o instanceof PersistentEntity) {
cls = ((PersistentEntity) o).getJavaClass();
} else {
cls = o.getClass();
}
Persister p = persisters.get(cls);
if (p == null) {
p = createPersister(cls, getMappingContext());
if (p != null) {
// Stateless entities get no first-level instance cache.
if (!isStateless(((EntityPersister) p).getPersistentEntity())) {
firstLevelCache.put(cls, new ConcurrentHashMap<>());
}
persisters.put(cls, p);
}
}
// NOTE(review): the get/create/put sequence is not atomic; concurrent
// callers may briefly create duplicate persisters (last write wins).
// Presumably harmless if createPersister is side-effect free — confirm.
return p;
}
// Factory for store-specific persisters; may return null for types that are
// not persistent.
protected abstract Persister createPersister(Class cls, MappingContext mappingContext);
/**
 * Whether the given object is tracked by this session's first-level cache —
 * by identifier when it has one, otherwise by value identity.
 */
public boolean contains(Object o) {
    if (o == null || isStateless()) {
        return false;
    }
    Map<Serializable, Object> cache = getInstanceCache(o.getClass());
    Serializable identifier = getObjectIdentifier(o);
    return identifier != null ? cache.containsKey(identifier) : cache.containsValue(o);
}
/**
 * Whether an instance of the given type with the given key is present in the
 * first-level cache.
 *
 * <p>Fixes an NPE: the original dereferenced {@code type.getName()} BEFORE the
 * {@code type == null} guard, so {@code isCached(null, key)} threw instead of
 * returning {@code false}.
 *
 * @param type the persistent class (null-safe)
 * @param key the identifier (null-safe)
 */
public boolean isCached(Class type, Serializable key) {
    if (type == null || key == null) {
        return false;
    }
    PersistentEntity entity = getMappingContext().getPersistentEntity(type.getName());
    if (isStateless(entity)) {
        return false;
    }
    return getInstanceCache(type).containsKey(key);
}
// Puts an instance into the first-level cache unless the entity is stateless.
public void cacheInstance(Class type, Serializable key, Object instance) {
if (type == null || key == null || instance == null) {
return;
}
if (isStateless(getMappingContext().getPersistentEntity(type.getName()))) return;
getInstanceCache(type).put(key, instance);
}
// Retrieves a cached instance, or null when absent or when the session or the
// entity's mapping is stateless.
public Object getCachedInstance(Class type, Serializable key) {
if (isStateless()) return null;
if (type == null || key == null) {
return null;
}
if (isStateless(getMappingContext().getPersistentEntity(type.getName()))) return null;
return getInstanceCache(type).get(key);
}
// Evicts a single object (by its identifier) from the first-level cache and
// drops any session attributes attached to it.
public void clear(Object o) {
if (o == null || isStateless()) {
return;
}
final Map<Serializable, Object> cache = firstLevelCache.get(o.getClass());
if (cache != null) {
Persister persister = getPersister(o);
Serializable key = persister.getObjectIdentifier(o);
if (key != null) {
cache.remove(key);
}
}
removeAttributesForEntity(o);
}
// Re-associates a detached object with this session by caching it under its
// identifier; no-op for null, non-persistent, or transient objects.
public void attach(Object o) {
if (o == null) {
return;
}
EntityPersister p = (EntityPersister) getPersister(o);
if (p == null) {
return;
}
Serializable identifier = p.getObjectIdentifier(o);
if (identifier != null) {
cacheObject(identifier, o);
}
}
// Null-safe convenience wrapper around cacheInstance.
protected void cacheObject(Serializable identifier, Object o) {
if (identifier == null || o == null) {
return;
}
cacheInstance(o.getClass(), identifier, o);
}
// Persists (save-or-update) an object and caches it under the returned key.
// Throws NonPersistentTypeException when the type has no persister.
public Serializable persist(Object o) {
Assert.notNull(o, "Cannot persist null object");
Persister persister = getPersister(o);
if (persister == null) {
throw new NonPersistentTypeException("Object [" + o +
"] cannot be persisted. It is not a known persistent type.");
}
final Serializable key = persister.persist(o);
cacheObject(key, o);
return key;
}
// Forces an INSERT (no update-or-insert detection) and caches the result.
@Override
public Serializable insert(Object o) {
Assert.notNull(o, "Cannot persist null object");
Persister persister = getPersister(o);
if (persister == null) {
throw new NonPersistentTypeException("Object [" + o +
"] cannot be persisted. It is not a known persistent type.");
}
final Serializable key = persister.insert(o);
cacheObject(key, o);
return key;
}
// Reloads the object's state from the datastore and re-caches it.
public void refresh(Object o) {
Assert.notNull(o, "Cannot persist null object");
Persister persister = getPersister(o);
if (persister == null) {
throw new NonPersistentTypeException("Object [" + o +
"] cannot be refreshed. It is not a known persistent type.");
}
final Serializable key = persister.refresh(o);
cacheObject(key, o);
}
// Retrieves an instance by key, converting the key to the entity's identity
// type when needed and consulting the first-level cache before the store.
public Object retrieve(Class type, Serializable key) {
if (key == null || type == null || NULL.equals(key)) {
return null;
}
Persister persister = getPersister(type);
if (persister == null) {
throw new NonPersistentTypeException("Cannot retrieve object with key [" + key +
"]. The class [" + type.getName() + "] is not a known persistent type.");
}
final PersistentEntity entity = ((EntityPersister) persister).getPersistentEntity();
if (entity != null) {
final PersistentProperty identity = entity.getIdentity();
if (!identity.getType().isAssignableFrom(key.getClass())) {
// e.g. convert a String key to a Long identity.
key = convertIdentityIfNecessasry(identity, key);
}
}
if (key == null) {
// Conversion may have produced null.
return null;
}
Object o = getInstanceCache(type).get(key);
if (o == null) {
o = persister.retrieve(key);
if (o != null) {
cacheObject(key, o);
}
}
return o;
}
// Best-effort conversion of the key to the identity property's type; returns
// the key unchanged when conversion is unsupported or fails.
// NOTE(review): the method name misspells "Necessary"; kept as-is because
// subclasses/callers may reference it.
protected Serializable convertIdentityIfNecessasry(PersistentProperty identity, Serializable key) {
ConversionService conversionService = getMappingContext().getConversionService();
if (conversionService.canConvert(key.getClass(), identity.getType())) {
try {
key = (Serializable) conversionService.convert(key, identity.getType());
} catch (ConversionFailedException conversionFailedException) {
// ignore
}
}
return key;
}
/**
 * Returns a lazy proxy for the given key — or the real instance when one is
 * already present in the first-level cache.
 *
 * @throws NonPersistentTypeException when the type has no persister
 */
public Object proxy(Class type, Serializable key) {
    if (key == null || type == null) {
        return null;
    }
    Persister persister = getPersister(type);
    if (persister == null) {
        throw new NonPersistentTypeException("Cannot retrieve object with key [" + key +
                "]. The class [" + type.getName() + "] is not a known persistent type.");
    }
    // only return proxy if real instance is not available.
    Object cached = getInstanceCache(type).get(key);
    return cached != null ? cached : persister.proxy(key);
}
// Pessimistic locking is optional; datastores that support it override these.
public void lock(Object o) {
throw new UnsupportedOperationException("Datastore [" + getClass().getName() + "] does not support locking.");
}
public Object lock(Class type, Serializable key) {
throw new UnsupportedOperationException("Datastore [" + getClass().getName() + "] does not support locking.");
}
// Releases a lock taken by a subclass implementation of lock().
public void unlock(Object o) {
if (o != null) {
lockedObjects.remove(o);
}
}
/**
 * This default implementation of the deleteAll method is unlikely to be optimal as it iterates and deletes each object.
 * <p>
 * Subclasses should override to optimize for the batch operation capability of the underlying datastore
 *
 * @param criteria The criteria
 * @return the number of objects deleted
 */
public long deleteAll(QueryableCriteria criteria) {
List list = criteria.list();
delete(list);
return list.size();
}
/**
 * This default implementation of updateAll is unlikely to be optimal as it iterates and updates each object one by one.
 * <p>
 * Subclasses should override to optimize for the batch operation capability of the underlying datastore
 *
 * @param criteria The criteria selecting the objects to update
 * @param properties The property name/value pairs applied to every match
 * @return the number of objects updated
 */
public long updateAll(QueryableCriteria criteria, Map<String, Object> properties) {
    List list = criteria.list();
    for (Object o : list) {
        BeanWrapper bean = new BeanWrapperImpl(o);
        // Iterate entries directly instead of keySet()+get(): one map lookup
        // per property instead of two, and the standard Java idiom.
        for (Map.Entry<String, Object> entry : properties.entrySet()) {
            bean.setPropertyValue(entry.getKey(), entry.getValue());
        }
    }
    persist(list);
    return list.size();
}
/**
 * Deletes a single object and evicts it from the first-level cache.
 * No-op for null or non-persistent objects.
 */
public void delete(final Object obj) {
    if (obj == null) {
        return;
    }
    final EntityPersister persister = (EntityPersister) getPersister(obj);
    if (persister != null) {
        persister.delete(obj);
        clear(obj);
    }
}
/**
 * Deletes a (possibly heterogeneous) batch of objects, grouping them by
 * persister so each persister receives a single bulk delete call.
 *
 * @param objects the objects to delete; null entries and non-persistent types
 *     are silently skipped
 */
public void delete(final Iterable objects) {
    if (objects == null) {
        return;
    }
    // sort the objects into sets by Persister, in case the objects are of different types.
    Map<Persister, List> toDelete = new HashMap<>();
    for (Object object : objects) {
        if (object == null) {
            continue;
        }
        final Persister p = getPersister(object);
        if (p == null) {
            continue;
        }
        // computeIfAbsent replaces the original get/null-check/put dance.
        toDelete.computeIfAbsent(p, persister -> new ArrayList()).add(object);
    }
    // for each type (usually only 1 type), set up a pendingDelete of that type
    for (Map.Entry<Persister, List> entry : toDelete.entrySet()) {
        final EntityPersister p = (EntityPersister) entry.getKey();
        p.delete(entry.getValue());
    }
}
// Persists a batch of objects; the persister of the FIRST element handles the
// whole iterable, so mixed-type batches are not supported by this method
// (unlike delete(Iterable), which groups by persister).
public List<Serializable> persist(Iterable objects) {
if (objects == null) {
return Collections.emptyList();
}
final Iterator i = objects.iterator();
if (!i.hasNext()) {
return Collections.emptyList();
}
// peek at the first object to get the persister
final Object obj = i.next();
final Persister p = getPersister(obj);
if (p == null) {
throw new NonPersistentTypeException("Cannot persist objects. The class [" +
obj.getClass().getName() + "] is not a known persistent type.");
}
return p.persist(objects);
}
// Retrieves many instances by key, preserving key order. Cache hits are used
// directly; misses are fetched in a single retrieveAll call and back-filled
// into the null placeholders (keys matching nothing remain null).
public List retrieveAll(Class type, Iterable keys) {
EntityPersister p = (EntityPersister) getPersister(type);
if (p == null) {
throw new NonPersistentTypeException("Cannot retrieve objects with keys [" + keys +
"]. The class [" + type.getName() + "] is not a known persistent type.");
}
List list = new ArrayList();
List<Serializable> toRetrieve = new ArrayList<>();
final Map<Serializable, Object> cache = getInstanceCache(type);
for (Object key : keys) {
Serializable serializable = (Serializable) key;
Object cached = cache.get(serializable);
// A null placeholder marks a miss to be filled in below.
list.add(cached);
if (cached == null) {
toRetrieve.add(serializable);
}
}
List<Object> retrieved = p.retrieveAll(toRetrieve);
Iterator<Serializable> keyIterator = toRetrieve.iterator();
Map<Serializable, Object> retrievedMap = new HashMap<>();
for (Object o : retrieved) {
final Serializable identifier = p.getObjectIdentifier(o);
if (identifier != null) {
retrievedMap.put(identifier, o);
}
}
// now fill in the null entries (possibly with more nulls)
for (int i = 0; i < list.size(); i++) {
Object o = list.get(i);
if (o == null) {
if (keyIterator.hasNext()) {
Serializable key = keyIterator.next();
// Normalize the key to the identity type so it matches retrievedMap.
key = (Serializable) mappingContext.getConversionService().convert(key, p.getPersistentEntity().getIdentity().getType());
final Object next = retrievedMap.get(key);
list.set(i, next);
cacheInstance(type, key, next);
}
}
}
return list;
}
// Varargs convenience overload; delegates to retrieveAll(Class, Iterable).
public List retrieveAll(Class type, Serializable... keys) {
Persister p = getPersister(type);
if (p == null) {
throw new NonPersistentTypeException("Cannot retrieve objects with keys [" + keys +
"]. The class [" + type.getName() + "] is not a known persistent type.");
}
return retrieveAll(type, Arrays.asList(keys));
}
// Creates a store-specific Query for the given persistent type.
public Query createQuery(Class type) {
Persister p = getPersister(type);
if (p == null) {
throw new NonPersistentTypeException("Cannot create query. The class [" + type +
"] is not a known persistent type.");
}
return p.createQuery();
}
// Begins a transaction with default settings.
public final Transaction beginTransaction() {
return beginTransaction(new DefaultTransactionDefinition());
}
// NOTE(review): the TransactionDefinition argument is ignored by this base
// implementation — beginTransactionInternal takes no parameters. Confirm
// whether subclasses are expected to honor it.
@Override
public Transaction beginTransaction(TransactionDefinition definition) {
transaction = beginTransactionInternal();
return transaction;
}
// Store-specific transaction creation.
protected abstract Transaction beginTransactionInternal();
// Returns the active transaction or fails fast when none was begun.
public Transaction getTransaction() {
if (transaction == null) {
throw new NoTransactionException("Transaction not started. Call beginTransaction() first");
}
return transaction;
}
@Override
public boolean hasTransaction() {
return transaction != null;
}
/**
 * Returns (lazily creating) the first-level instance cache for a class.
 *
 * <p>{@code computeIfAbsent} makes the lazy creation atomic; the original
 * get/null-check/put sequence could let two racing threads install different
 * cache maps, losing entries cached through the discarded one.
 */
private Map<Serializable, Object> getInstanceCache(Class c) {
    return firstLevelCache.computeIfAbsent(c, cls -> new ConcurrentHashMap<>());
}
/**
 * Returns (lazily creating) the native-entry cache for a class;
 * {@code forDirtyCheck} selects the dirty-check snapshot cache instead of the
 * live entry cache. {@code computeIfAbsent} closes the same check-then-act
 * race fixed in {@code getInstanceCache}.
 */
private Map<Serializable, Object> getEntryCache(Class c, boolean forDirtyCheck) {
    Map<Class, Map<Serializable, Object>> caches =
            forDirtyCheck ? firstLevelEntryCacheDirtyCheck : firstLevelEntryCache;
    return caches.computeIfAbsent(c, cls -> new ConcurrentHashMap<>());
}
@Override
public EntityAccess createEntityAccess(PersistentEntity entity, Object instance) {
// Delegates reflective property access to the mapping context.
return getMappingContext().createEntityAccess(entity, instance);
}
/**
 * Whether the session is synchronized with an external transaction
 *
 * @param isSynchronizedWithTransaction True if it is
 */
public void setSynchronizedWithTransaction(boolean isSynchronizedWithTransaction) {
this.isSynchronizedWithTransaction = isSynchronizedWithTransaction;
}
/**
 * Composite cache key for a loaded collection: owning class + owner id +
 * collection property name. Immutable; safe for use as a map key.
 */
private static class CollectionKey {
    final Class clazz;
    final Serializable key;
    final String collectionName;

    private CollectionKey(Class clazz, Serializable key, String collectionName) {
        this.clazz = clazz;
        this.key = key;
        this.collectionName = collectionName;
    }

    @Override
    public int hashCode() {
        int value = 17;
        value = value * 37 + clazz.getName().hashCode();
        value = value * 37 + key.hashCode();
        value = value * 37 + collectionName.hashCode();
        return value;
    }

    /**
     * Fixes equals-contract violations in the original: it compared class
     * names with {@code ==} (reference identity — {@code Class.getName()} is
     * not guaranteed to return an interned/identical String), threw
     * ClassCastException for unrelated argument types, and NullPointerException
     * for null, instead of returning {@code false}.
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof CollectionKey)) {
            return false;
        }
        CollectionKey other = (CollectionKey) obj;
        return other.clazz.getName().equals(clazz.getName()) &&
                other.key.equals(key) &&
                other.collectionName.equals(collectionName);
    }

    @Override
    public String toString() {
        return clazz.getName() + ':' + key + ':' + collectionName;
    }
}
}
|
googleapis/google-cloud-java | 34,973 | java-analytics-admin/proto-google-analytics-admin-v1alpha/src/main/java/com/google/analytics/admin/v1alpha/ChannelGroupFilterExpressionList.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/analytics/admin/v1alpha/channel_group.proto
// Protobuf Java Version: 3.25.8
package com.google.analytics.admin.v1alpha;
/**
*
*
* <pre>
* A list of Channel Group filter expressions.
* </pre>
*
* Protobuf type {@code google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList}
*/
public final class ChannelGroupFilterExpressionList extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList)
ChannelGroupFilterExpressionListOrBuilder {
private static final long serialVersionUID = 0L;
// NOTE(review): protoc-generated code ("DO NOT EDIT"). Any hand edit here is
// lost on regeneration — change google/analytics/admin/v1alpha/channel_group.proto
// and regenerate instead. Comments below are review annotations only.
// Use ChannelGroupFilterExpressionList.newBuilder() to construct.
private ChannelGroupFilterExpressionList(
com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Default instance state: an empty, immutable expression list.
private ChannelGroupFilterExpressionList() {
filterExpressions_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ChannelGroupFilterExpressionList();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.analytics.admin.v1alpha.ChannelGroupProto
.internal_static_google_analytics_admin_v1alpha_ChannelGroupFilterExpressionList_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.analytics.admin.v1alpha.ChannelGroupProto
.internal_static_google_analytics_admin_v1alpha_ChannelGroupFilterExpressionList_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList.class,
com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList.Builder.class);
}
// NOTE(review): protoc-generated field storage and accessors for the repeated
// filter_expressions field (field number 1); do not hand-edit.
public static final int FILTER_EXPRESSIONS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.analytics.admin.v1alpha.ChannelGroupFilterExpression>
filterExpressions_;
/**
 *
 *
 * <pre>
 * A list of Channel Group filter expressions.
 * </pre>
 *
 * <code>
 * repeated .google.analytics.admin.v1alpha.ChannelGroupFilterExpression filter_expressions = 1;
 * </code>
 */
@java.lang.Override
public java.util.List<com.google.analytics.admin.v1alpha.ChannelGroupFilterExpression>
getFilterExpressionsList() {
return filterExpressions_;
}
/**
 *
 *
 * <pre>
 * A list of Channel Group filter expressions.
 * </pre>
 *
 * <code>
 * repeated .google.analytics.admin.v1alpha.ChannelGroupFilterExpression filter_expressions = 1;
 * </code>
 */
@java.lang.Override
public java.util.List<
? extends com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionOrBuilder>
getFilterExpressionsOrBuilderList() {
return filterExpressions_;
}
/**
 *
 *
 * <pre>
 * A list of Channel Group filter expressions.
 * </pre>
 *
 * <code>
 * repeated .google.analytics.admin.v1alpha.ChannelGroupFilterExpression filter_expressions = 1;
 * </code>
 */
@java.lang.Override
public int getFilterExpressionsCount() {
return filterExpressions_.size();
}
/**
 *
 *
 * <pre>
 * A list of Channel Group filter expressions.
 * </pre>
 *
 * <code>
 * repeated .google.analytics.admin.v1alpha.ChannelGroupFilterExpression filter_expressions = 1;
 * </code>
 */
@java.lang.Override
public com.google.analytics.admin.v1alpha.ChannelGroupFilterExpression getFilterExpressions(
int index) {
return filterExpressions_.get(index);
}
/**
 *
 *
 * <pre>
 * A list of Channel Group filter expressions.
 * </pre>
 *
 * <code>
 * repeated .google.analytics.admin.v1alpha.ChannelGroupFilterExpression filter_expressions = 1;
 * </code>
 */
@java.lang.Override
public com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionOrBuilder
getFilterExpressionsOrBuilder(int index) {
return filterExpressions_.get(index);
}
// NOTE(review): protoc-generated serialization, equality and hashing plumbing;
// do not hand-edit — regenerate from the .proto instead.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < filterExpressions_.size(); i++) {
output.writeMessage(1, filterExpressions_.get(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < filterExpressions_.size(); i++) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(1, filterExpressions_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList)) {
return super.equals(obj);
}
com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList other =
(com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList) obj;
if (!getFilterExpressionsList().equals(other.getFilterExpressionsList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getFilterExpressionsCount() > 0) {
hash = (37 * hash) + FILTER_EXPRESSIONS_FIELD_NUMBER;
hash = (53 * hash) + getFilterExpressionsList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// NOTE(review): protoc-generated parse/builder factory boilerplate (one
// parseFrom overload per supported input source); do not hand-edit.
public static com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
  /**
   *
   *
   * <pre>
   * A list of Channel Group filter expressions.
   * </pre>
   *
   * Protobuf type {@code google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList)
      com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionListOrBuilder {

    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.analytics.admin.v1alpha.ChannelGroupProto
          .internal_static_google_analytics_admin_v1alpha_ChannelGroupFilterExpressionList_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.analytics.admin.v1alpha.ChannelGroupProto
          .internal_static_google_analytics_admin_v1alpha_ChannelGroupFilterExpressionList_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList.class,
              com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList.Builder.class);
    }

    // Construct using
    // com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets every field to its default and detaches/clears any nested field builder.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (filterExpressionsBuilder_ == null) {
        filterExpressions_ = java.util.Collections.emptyList();
      } else {
        filterExpressions_ = null;
        filterExpressionsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.analytics.admin.v1alpha.ChannelGroupProto
          .internal_static_google_analytics_admin_v1alpha_ChannelGroupFilterExpressionList_descriptor;
    }

    @java.lang.Override
    public com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList
        getDefaultInstanceForType() {
      return com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList
          .getDefaultInstance();
    }

    // Builds and verifies initialization (always true here; see isInitialized()).
    @java.lang.Override
    public com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList build() {
      com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    // Assembles a message from the current builder state without any validation.
    @java.lang.Override
    public com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList buildPartial() {
      com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList result =
          new com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Transfers the repeated field into the result, freezing the plain list if this
    // builder owns a mutable copy (bit 0 set).
    private void buildPartialRepeatedFields(
        com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList result) {
      if (filterExpressionsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          filterExpressions_ = java.util.Collections.unmodifiableList(filterExpressions_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.filterExpressions_ = filterExpressions_;
      } else {
        result.filterExpressions_ = filterExpressionsBuilder_.build();
      }
    }

    // Copies bitField0_-gated singular fields; this message has none beyond the
    // repeated field, so the body is intentionally empty.
    private void buildPartial0(
        com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList result) {
      int from_bitField0_ = bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    // Uses the typed merge when the other message is the same generated type,
    // otherwise falls back to the generic descriptor-driven merge.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList) {
        return mergeFrom(
            (com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Appends the other message's filter expressions. When our list is still empty
    // the other's (immutable) list is adopted by reference instead of copied.
    public Builder mergeFrom(
        com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList other) {
      if (other
          == com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList
              .getDefaultInstance()) return this;
      if (filterExpressionsBuilder_ == null) {
        if (!other.filterExpressions_.isEmpty()) {
          if (filterExpressions_.isEmpty()) {
            filterExpressions_ = other.filterExpressions_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureFilterExpressionsIsMutable();
            filterExpressions_.addAll(other.filterExpressions_);
          }
          onChanged();
        }
      } else {
        if (!other.filterExpressions_.isEmpty()) {
          if (filterExpressionsBuilder_.isEmpty()) {
            filterExpressionsBuilder_.dispose();
            filterExpressionsBuilder_ = null;
            filterExpressions_ = other.filterExpressions_;
            bitField0_ = (bitField0_ & ~0x00000001);
            filterExpressionsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getFilterExpressionsFieldBuilder()
                    : null;
          } else {
            filterExpressionsBuilder_.addAllMessages(other.filterExpressions_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format parse loop. Tag 10 = field 1 (filter_expressions, length-delimited);
    // anything else is routed to parseUnknownField and preserved.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.analytics.admin.v1alpha.ChannelGroupFilterExpression m =
                    input.readMessage(
                        com.google.analytics.admin.v1alpha.ChannelGroupFilterExpression.parser(),
                        extensionRegistry);
                if (filterExpressionsBuilder_ == null) {
                  ensureFilterExpressionsIsMutable();
                  filterExpressions_.add(m);
                } else {
                  filterExpressionsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Bit 0 set means filterExpressions_ is a mutable list owned by this builder.
    private int bitField0_;

    private java.util.List<com.google.analytics.admin.v1alpha.ChannelGroupFilterExpression>
        filterExpressions_ = java.util.Collections.emptyList();

    // Copy-on-first-write: replaces the (possibly shared/immutable) list with an
    // owned ArrayList before the first mutation.
    private void ensureFilterExpressionsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        filterExpressions_ =
            new java.util.ArrayList<
                com.google.analytics.admin.v1alpha.ChannelGroupFilterExpression>(
                filterExpressions_);
        bitField0_ |= 0x00000001;
      }
    }

    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.analytics.admin.v1alpha.ChannelGroupFilterExpression,
            com.google.analytics.admin.v1alpha.ChannelGroupFilterExpression.Builder,
            com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionOrBuilder>
        filterExpressionsBuilder_;

    /**
     *
     *
     * <pre>
     * A list of Channel Group filter expressions.
     * </pre>
     *
     * <code>
     * repeated .google.analytics.admin.v1alpha.ChannelGroupFilterExpression filter_expressions = 1;
     * </code>
     */
    public java.util.List<com.google.analytics.admin.v1alpha.ChannelGroupFilterExpression>
        getFilterExpressionsList() {
      if (filterExpressionsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(filterExpressions_);
      } else {
        return filterExpressionsBuilder_.getMessageList();
      }
    }

    /**
     *
     *
     * <pre>
     * A list of Channel Group filter expressions.
     * </pre>
     *
     * <code>
     * repeated .google.analytics.admin.v1alpha.ChannelGroupFilterExpression filter_expressions = 1;
     * </code>
     */
    public int getFilterExpressionsCount() {
      if (filterExpressionsBuilder_ == null) {
        return filterExpressions_.size();
      } else {
        return filterExpressionsBuilder_.getCount();
      }
    }

    /**
     *
     *
     * <pre>
     * A list of Channel Group filter expressions.
     * </pre>
     *
     * <code>
     * repeated .google.analytics.admin.v1alpha.ChannelGroupFilterExpression filter_expressions = 1;
     * </code>
     */
    public com.google.analytics.admin.v1alpha.ChannelGroupFilterExpression getFilterExpressions(
        int index) {
      if (filterExpressionsBuilder_ == null) {
        return filterExpressions_.get(index);
      } else {
        return filterExpressionsBuilder_.getMessage(index);
      }
    }

    /**
     *
     *
     * <pre>
     * A list of Channel Group filter expressions.
     * </pre>
     *
     * <code>
     * repeated .google.analytics.admin.v1alpha.ChannelGroupFilterExpression filter_expressions = 1;
     * </code>
     */
    public Builder setFilterExpressions(
        int index, com.google.analytics.admin.v1alpha.ChannelGroupFilterExpression value) {
      if (filterExpressionsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureFilterExpressionsIsMutable();
        filterExpressions_.set(index, value);
        onChanged();
      } else {
        filterExpressionsBuilder_.setMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * A list of Channel Group filter expressions.
     * </pre>
     *
     * <code>
     * repeated .google.analytics.admin.v1alpha.ChannelGroupFilterExpression filter_expressions = 1;
     * </code>
     */
    public Builder setFilterExpressions(
        int index,
        com.google.analytics.admin.v1alpha.ChannelGroupFilterExpression.Builder builderForValue) {
      if (filterExpressionsBuilder_ == null) {
        ensureFilterExpressionsIsMutable();
        filterExpressions_.set(index, builderForValue.build());
        onChanged();
      } else {
        filterExpressionsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * A list of Channel Group filter expressions.
     * </pre>
     *
     * <code>
     * repeated .google.analytics.admin.v1alpha.ChannelGroupFilterExpression filter_expressions = 1;
     * </code>
     */
    public Builder addFilterExpressions(
        com.google.analytics.admin.v1alpha.ChannelGroupFilterExpression value) {
      if (filterExpressionsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureFilterExpressionsIsMutable();
        filterExpressions_.add(value);
        onChanged();
      } else {
        filterExpressionsBuilder_.addMessage(value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * A list of Channel Group filter expressions.
     * </pre>
     *
     * <code>
     * repeated .google.analytics.admin.v1alpha.ChannelGroupFilterExpression filter_expressions = 1;
     * </code>
     */
    public Builder addFilterExpressions(
        int index, com.google.analytics.admin.v1alpha.ChannelGroupFilterExpression value) {
      if (filterExpressionsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureFilterExpressionsIsMutable();
        filterExpressions_.add(index, value);
        onChanged();
      } else {
        filterExpressionsBuilder_.addMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * A list of Channel Group filter expressions.
     * </pre>
     *
     * <code>
     * repeated .google.analytics.admin.v1alpha.ChannelGroupFilterExpression filter_expressions = 1;
     * </code>
     */
    public Builder addFilterExpressions(
        com.google.analytics.admin.v1alpha.ChannelGroupFilterExpression.Builder builderForValue) {
      if (filterExpressionsBuilder_ == null) {
        ensureFilterExpressionsIsMutable();
        filterExpressions_.add(builderForValue.build());
        onChanged();
      } else {
        filterExpressionsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * A list of Channel Group filter expressions.
     * </pre>
     *
     * <code>
     * repeated .google.analytics.admin.v1alpha.ChannelGroupFilterExpression filter_expressions = 1;
     * </code>
     */
    public Builder addFilterExpressions(
        int index,
        com.google.analytics.admin.v1alpha.ChannelGroupFilterExpression.Builder builderForValue) {
      if (filterExpressionsBuilder_ == null) {
        ensureFilterExpressionsIsMutable();
        filterExpressions_.add(index, builderForValue.build());
        onChanged();
      } else {
        filterExpressionsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * A list of Channel Group filter expressions.
     * </pre>
     *
     * <code>
     * repeated .google.analytics.admin.v1alpha.ChannelGroupFilterExpression filter_expressions = 1;
     * </code>
     */
    public Builder addAllFilterExpressions(
        java.lang.Iterable<
                ? extends com.google.analytics.admin.v1alpha.ChannelGroupFilterExpression>
            values) {
      if (filterExpressionsBuilder_ == null) {
        ensureFilterExpressionsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, filterExpressions_);
        onChanged();
      } else {
        filterExpressionsBuilder_.addAllMessages(values);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * A list of Channel Group filter expressions.
     * </pre>
     *
     * <code>
     * repeated .google.analytics.admin.v1alpha.ChannelGroupFilterExpression filter_expressions = 1;
     * </code>
     */
    public Builder clearFilterExpressions() {
      if (filterExpressionsBuilder_ == null) {
        filterExpressions_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        filterExpressionsBuilder_.clear();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * A list of Channel Group filter expressions.
     * </pre>
     *
     * <code>
     * repeated .google.analytics.admin.v1alpha.ChannelGroupFilterExpression filter_expressions = 1;
     * </code>
     */
    public Builder removeFilterExpressions(int index) {
      if (filterExpressionsBuilder_ == null) {
        ensureFilterExpressionsIsMutable();
        filterExpressions_.remove(index);
        onChanged();
      } else {
        filterExpressionsBuilder_.remove(index);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * A list of Channel Group filter expressions.
     * </pre>
     *
     * <code>
     * repeated .google.analytics.admin.v1alpha.ChannelGroupFilterExpression filter_expressions = 1;
     * </code>
     */
    public com.google.analytics.admin.v1alpha.ChannelGroupFilterExpression.Builder
        getFilterExpressionsBuilder(int index) {
      return getFilterExpressionsFieldBuilder().getBuilder(index);
    }

    /**
     *
     *
     * <pre>
     * A list of Channel Group filter expressions.
     * </pre>
     *
     * <code>
     * repeated .google.analytics.admin.v1alpha.ChannelGroupFilterExpression filter_expressions = 1;
     * </code>
     */
    public com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionOrBuilder
        getFilterExpressionsOrBuilder(int index) {
      if (filterExpressionsBuilder_ == null) {
        return filterExpressions_.get(index);
      } else {
        return filterExpressionsBuilder_.getMessageOrBuilder(index);
      }
    }

    /**
     *
     *
     * <pre>
     * A list of Channel Group filter expressions.
     * </pre>
     *
     * <code>
     * repeated .google.analytics.admin.v1alpha.ChannelGroupFilterExpression filter_expressions = 1;
     * </code>
     */
    public java.util.List<
            ? extends com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionOrBuilder>
        getFilterExpressionsOrBuilderList() {
      if (filterExpressionsBuilder_ != null) {
        return filterExpressionsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(filterExpressions_);
      }
    }

    /**
     *
     *
     * <pre>
     * A list of Channel Group filter expressions.
     * </pre>
     *
     * <code>
     * repeated .google.analytics.admin.v1alpha.ChannelGroupFilterExpression filter_expressions = 1;
     * </code>
     */
    public com.google.analytics.admin.v1alpha.ChannelGroupFilterExpression.Builder
        addFilterExpressionsBuilder() {
      return getFilterExpressionsFieldBuilder()
          .addBuilder(
              com.google.analytics.admin.v1alpha.ChannelGroupFilterExpression.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * A list of Channel Group filter expressions.
     * </pre>
     *
     * <code>
     * repeated .google.analytics.admin.v1alpha.ChannelGroupFilterExpression filter_expressions = 1;
     * </code>
     */
    public com.google.analytics.admin.v1alpha.ChannelGroupFilterExpression.Builder
        addFilterExpressionsBuilder(int index) {
      return getFilterExpressionsFieldBuilder()
          .addBuilder(
              index,
              com.google.analytics.admin.v1alpha.ChannelGroupFilterExpression.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * A list of Channel Group filter expressions.
     * </pre>
     *
     * <code>
     * repeated .google.analytics.admin.v1alpha.ChannelGroupFilterExpression filter_expressions = 1;
     * </code>
     */
    public java.util.List<com.google.analytics.admin.v1alpha.ChannelGroupFilterExpression.Builder>
        getFilterExpressionsBuilderList() {
      return getFilterExpressionsFieldBuilder().getBuilderList();
    }

    // Lazily replaces plain-list storage with a RepeatedFieldBuilderV3 view; after
    // this call filterExpressions_ is null and the builder owns the data.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.analytics.admin.v1alpha.ChannelGroupFilterExpression,
            com.google.analytics.admin.v1alpha.ChannelGroupFilterExpression.Builder,
            com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionOrBuilder>
        getFilterExpressionsFieldBuilder() {
      if (filterExpressionsBuilder_ == null) {
        filterExpressionsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.analytics.admin.v1alpha.ChannelGroupFilterExpression,
                com.google.analytics.admin.v1alpha.ChannelGroupFilterExpression.Builder,
                com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionOrBuilder>(
                filterExpressions_,
                ((bitField0_ & 0x00000001) != 0),
                getParentForChildren(),
                isClean());
        filterExpressions_ = null;
      }
      return filterExpressionsBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList)
  }
// @@protoc_insertion_point(class_scope:google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList)
  // Shared default instance, created eagerly at class-load time.
  private static final com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList();
  }
  /** Returns the shared default instance of this message type. */
  public static com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Singleton parser: delegates to Builder.mergeFrom and attaches the partially
  // built message to any parse failure via setUnfinishedMessage.
  private static final com.google.protobuf.Parser<ChannelGroupFilterExpressionList> PARSER =
      new com.google.protobuf.AbstractParser<ChannelGroupFilterExpressionList>() {
        @java.lang.Override
        public ChannelGroupFilterExpressionList parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  /** Returns the singleton parser for this message type. */
  public static com.google.protobuf.Parser<ChannelGroupFilterExpressionList> parser() {
    return PARSER;
  }
  // Instance-level accessor for the same singleton returned by parser().
  @java.lang.Override
  public com.google.protobuf.Parser<ChannelGroupFilterExpressionList> getParserForType() {
    return PARSER;
  }
  // Instance-level accessor for the shared default instance.
  @java.lang.Override
  public com.google.analytics.admin.v1alpha.ChannelGroupFilterExpressionList
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 35,007 | java-managedkafka/proto-google-cloud-managedkafka-v1/src/main/java/com/google/cloud/managedkafka/v1/TlsConfig.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/managedkafka/v1/resources.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.managedkafka.v1;
/**
*
*
* <pre>
* The TLS configuration for the Kafka cluster.
* </pre>
*
* Protobuf type {@code google.cloud.managedkafka.v1.TlsConfig}
*/
public final class TlsConfig extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.managedkafka.v1.TlsConfig)
TlsConfigOrBuilder {
private static final long serialVersionUID = 0L;
  // Use TlsConfig.newBuilder() to construct.
  // Private: instances are created only via the Builder.
  private TlsConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default-instance constructor; initializes string fields to non-null defaults.
  private TlsConfig() {
    sslPrincipalMappingRules_ = "";
  }
  // Allocation hook invoked by the protobuf runtime; the parameter is unused.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new TlsConfig();
  }
  /** Returns the descriptor for the {@code google.cloud.managedkafka.v1.TlsConfig} message. */
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.managedkafka.v1.ResourcesProto
        .internal_static_google_cloud_managedkafka_v1_TlsConfig_descriptor;
  }
  // Wires this class and its Builder into the reflection-based field accessor table.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.managedkafka.v1.ResourcesProto
        .internal_static_google_cloud_managedkafka_v1_TlsConfig_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.managedkafka.v1.TlsConfig.class,
            com.google.cloud.managedkafka.v1.TlsConfig.Builder.class);
  }
private int bitField0_;
public static final int TRUST_CONFIG_FIELD_NUMBER = 1;
private com.google.cloud.managedkafka.v1.TrustConfig trustConfig_;
  /**
   *
   *
   * <pre>
   * Optional. The configuration of the broker truststore. If specified, clients
   * can use mTLS for authentication.
   * </pre>
   *
   * <code>
   * .google.cloud.managedkafka.v1.TrustConfig trust_config = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the trustConfig field is set.
   */
  @java.lang.Override
  public boolean hasTrustConfig() {
    // Explicit presence is tracked in bit 0 of bitField0_.
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Optional. The configuration of the broker truststore. If specified, clients
   * can use mTLS for authentication.
   * </pre>
   *
   * <code>
   * .google.cloud.managedkafka.v1.TrustConfig trust_config = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The trustConfig.
   */
  @java.lang.Override
  public com.google.cloud.managedkafka.v1.TrustConfig getTrustConfig() {
    // Never returns null: falls back to the TrustConfig default instance when unset.
    return trustConfig_ == null
        ? com.google.cloud.managedkafka.v1.TrustConfig.getDefaultInstance()
        : trustConfig_;
  }
  /**
   *
   *
   * <pre>
   * Optional. The configuration of the broker truststore. If specified, clients
   * can use mTLS for authentication.
   * </pre>
   *
   * <code>
   * .google.cloud.managedkafka.v1.TrustConfig trust_config = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.managedkafka.v1.TrustConfigOrBuilder getTrustConfigOrBuilder() {
    // Same fallback behavior as getTrustConfig(); an immutable message has no builder.
    return trustConfig_ == null
        ? com.google.cloud.managedkafka.v1.TrustConfig.getDefaultInstance()
        : trustConfig_;
  }
public static final int SSL_PRINCIPAL_MAPPING_RULES_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object sslPrincipalMappingRules_ = "";
  /**
   *
   *
   * <pre>
   * Optional. A list of rules for mapping from SSL principal names to
   * short names. These are applied in order by Kafka.
   * Refer to the Apache Kafka documentation for `ssl.principal.mapping.rules`
   * for the precise formatting details and syntax.
   * Example: "RULE:^CN=(.*?),OU=ServiceUsers.*$/$1@example.com/,DEFAULT"
   *
   * This is a static Kafka broker configuration. Setting or modifying this
   * field will trigger a rolling restart of the Kafka brokers to apply
   * the change. An empty string means no rules are applied (Kafka default).
   * </pre>
   *
   * <code>string ssl_principal_mapping_rules = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The sslPrincipalMappingRules.
   */
  @java.lang.Override
  public java.lang.String getSslPrincipalMappingRules() {
    java.lang.Object ref = sslPrincipalMappingRules_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so subsequent reads skip UTF-8 decoding.
      sslPrincipalMappingRules_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Optional. A list of rules for mapping from SSL principal names to
   * short names. These are applied in order by Kafka.
   * Refer to the Apache Kafka documentation for `ssl.principal.mapping.rules`
   * for the precise formatting details and syntax.
   * Example: "RULE:^CN=(.*?),OU=ServiceUsers.*$/$1@example.com/,DEFAULT"
   *
   * This is a static Kafka broker configuration. Setting or modifying this
   * field will trigger a rolling restart of the Kafka brokers to apply
   * the change. An empty string means no rules are applied (Kafka default).
   * </pre>
   *
   * <code>string ssl_principal_mapping_rules = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for sslPrincipalMappingRules.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getSslPrincipalMappingRulesBytes() {
    java.lang.Object ref = sslPrincipalMappingRules_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the encoded ByteString for subsequent byte-level reads.
      sslPrincipalMappingRules_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
private byte memoizedIsInitialized = -1;
  // No required fields here, so this always ends up true; the result is memoized
  // in a byte (-1 = unknown, 0 = false, 1 = true).
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes set fields: field 1 only when present (bit 0), field 2 only when
  // non-empty, then any unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getTrustConfig());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(sslPrincipalMappingRules_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, sslPrincipalMappingRules_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes the wire size mirroring writeTo(); memoized in memoizedSize (-1 = stale).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getTrustConfig());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(sslPrincipalMappingRules_)) {
      size +=
          com.google.protobuf.GeneratedMessageV3.computeStringSize(2, sslPrincipalMappingRules_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-wise equality, including presence of trust_config and unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.managedkafka.v1.TlsConfig)) {
      return super.equals(obj);
    }
    com.google.cloud.managedkafka.v1.TlsConfig other =
        (com.google.cloud.managedkafka.v1.TlsConfig) obj;

    if (hasTrustConfig() != other.hasTrustConfig()) return false;
    if (hasTrustConfig()) {
      if (!getTrustConfig().equals(other.getTrustConfig())) return false;
    }
    if (!getSslPrincipalMappingRules().equals(other.getSslPrincipalMappingRules())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash consistent with equals(): mixes descriptor, set fields, and unknown fields;
  // memoized (0 means "not yet computed").
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasTrustConfig()) {
      hash = (37 * hash) + TRUST_CONFIG_FIELD_NUMBER;
      hash = (53 * hash) + getTrustConfig().hashCode();
    }
    hash = (37 * hash) + SSL_PRINCIPAL_MAPPING_RULES_FIELD_NUMBER;
    hash = (53 * hash) + getSslPrincipalMappingRules().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  /** Parses a TlsConfig from a {@link java.nio.ByteBuffer}. */
  public static com.google.cloud.managedkafka.v1.TlsConfig parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  /** Parses a TlsConfig from a {@link java.nio.ByteBuffer} with an extension registry. */
  public static com.google.cloud.managedkafka.v1.TlsConfig parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  /** Parses a TlsConfig from a {@link com.google.protobuf.ByteString}. */
  public static com.google.cloud.managedkafka.v1.TlsConfig parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  /** Parses a TlsConfig from a {@link com.google.protobuf.ByteString} with an extension registry. */
  public static com.google.cloud.managedkafka.v1.TlsConfig parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  /** Parses a TlsConfig from a byte array. */
  public static com.google.cloud.managedkafka.v1.TlsConfig parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  /** Parses a TlsConfig from a byte array with an extension registry. */
  public static com.google.cloud.managedkafka.v1.TlsConfig parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  /** Parses a TlsConfig from an {@link java.io.InputStream}. */
  public static com.google.cloud.managedkafka.v1.TlsConfig parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  /** Parses a TlsConfig from an {@link java.io.InputStream} with an extension registry. */
  public static com.google.cloud.managedkafka.v1.TlsConfig parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  /** Parses one length-delimited TlsConfig from {@code input}. */
  public static com.google.cloud.managedkafka.v1.TlsConfig parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  /** Parses one length-delimited TlsConfig from {@code input} with an extension registry. */
  public static com.google.cloud.managedkafka.v1.TlsConfig parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  /** Parses a TlsConfig from a {@link com.google.protobuf.CodedInputStream}. */
  public static com.google.cloud.managedkafka.v1.TlsConfig parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  /** Parses a TlsConfig from a {@link com.google.protobuf.CodedInputStream} with an extension registry. */
  public static com.google.cloud.managedkafka.v1.TlsConfig parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Instance-level hook required by Message; delegates to the static factory.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  /** Returns a new, empty builder for this message type. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  /** Returns a new builder pre-populated with {@code prototype}'s fields. */
  public static Builder newBuilder(com.google.cloud.managedkafka.v1.TlsConfig prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  // Skips the mergeFrom copy when this is the shared (empty) default instance.
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  // Internal: creates a nested builder attached to the given parent builder.
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* The TLS configuration for the Kafka cluster.
* </pre>
*
* Protobuf type {@code google.cloud.managedkafka.v1.TlsConfig}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.managedkafka.v1.TlsConfig)
com.google.cloud.managedkafka.v1.TlsConfigOrBuilder {
    /** Returns the descriptor for the {@code google.cloud.managedkafka.v1.TlsConfig} message. */
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.managedkafka.v1.ResourcesProto
          .internal_static_google_cloud_managedkafka_v1_TlsConfig_descriptor;
    }
    // Wires the message class and this Builder into the reflection accessor table.
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.managedkafka.v1.ResourcesProto
          .internal_static_google_cloud_managedkafka_v1_TlsConfig_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.managedkafka.v1.TlsConfig.class,
              com.google.cloud.managedkafka.v1.TlsConfig.Builder.class);
    }
// Construct using com.google.cloud.managedkafka.v1.TlsConfig.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getTrustConfigFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
trustConfig_ = null;
if (trustConfigBuilder_ != null) {
trustConfigBuilder_.dispose();
trustConfigBuilder_ = null;
}
sslPrincipalMappingRules_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.managedkafka.v1.ResourcesProto
.internal_static_google_cloud_managedkafka_v1_TlsConfig_descriptor;
}
@java.lang.Override
public com.google.cloud.managedkafka.v1.TlsConfig getDefaultInstanceForType() {
return com.google.cloud.managedkafka.v1.TlsConfig.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.managedkafka.v1.TlsConfig build() {
com.google.cloud.managedkafka.v1.TlsConfig result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.managedkafka.v1.TlsConfig buildPartial() {
com.google.cloud.managedkafka.v1.TlsConfig result =
new com.google.cloud.managedkafka.v1.TlsConfig(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.managedkafka.v1.TlsConfig result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.trustConfig_ =
trustConfigBuilder_ == null ? trustConfig_ : trustConfigBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.sslPrincipalMappingRules_ = sslPrincipalMappingRules_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.managedkafka.v1.TlsConfig) {
return mergeFrom((com.google.cloud.managedkafka.v1.TlsConfig) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.managedkafka.v1.TlsConfig other) {
if (other == com.google.cloud.managedkafka.v1.TlsConfig.getDefaultInstance()) return this;
if (other.hasTrustConfig()) {
mergeTrustConfig(other.getTrustConfig());
}
if (!other.getSslPrincipalMappingRules().isEmpty()) {
sslPrincipalMappingRules_ = other.sslPrincipalMappingRules_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getTrustConfigFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
sslPrincipalMappingRules_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.cloud.managedkafka.v1.TrustConfig trustConfig_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.managedkafka.v1.TrustConfig,
com.google.cloud.managedkafka.v1.TrustConfig.Builder,
com.google.cloud.managedkafka.v1.TrustConfigOrBuilder>
trustConfigBuilder_;
/**
*
*
* <pre>
* Optional. The configuration of the broker truststore. If specified, clients
* can use mTLS for authentication.
* </pre>
*
* <code>
* .google.cloud.managedkafka.v1.TrustConfig trust_config = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the trustConfig field is set.
*/
public boolean hasTrustConfig() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Optional. The configuration of the broker truststore. If specified, clients
* can use mTLS for authentication.
* </pre>
*
* <code>
* .google.cloud.managedkafka.v1.TrustConfig trust_config = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The trustConfig.
*/
public com.google.cloud.managedkafka.v1.TrustConfig getTrustConfig() {
if (trustConfigBuilder_ == null) {
return trustConfig_ == null
? com.google.cloud.managedkafka.v1.TrustConfig.getDefaultInstance()
: trustConfig_;
} else {
return trustConfigBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Optional. The configuration of the broker truststore. If specified, clients
* can use mTLS for authentication.
* </pre>
*
* <code>
* .google.cloud.managedkafka.v1.TrustConfig trust_config = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder setTrustConfig(com.google.cloud.managedkafka.v1.TrustConfig value) {
if (trustConfigBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
trustConfig_ = value;
} else {
trustConfigBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The configuration of the broker truststore. If specified, clients
* can use mTLS for authentication.
* </pre>
*
* <code>
* .google.cloud.managedkafka.v1.TrustConfig trust_config = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder setTrustConfig(
com.google.cloud.managedkafka.v1.TrustConfig.Builder builderForValue) {
if (trustConfigBuilder_ == null) {
trustConfig_ = builderForValue.build();
} else {
trustConfigBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The configuration of the broker truststore. If specified, clients
* can use mTLS for authentication.
* </pre>
*
* <code>
* .google.cloud.managedkafka.v1.TrustConfig trust_config = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder mergeTrustConfig(com.google.cloud.managedkafka.v1.TrustConfig value) {
if (trustConfigBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& trustConfig_ != null
&& trustConfig_ != com.google.cloud.managedkafka.v1.TrustConfig.getDefaultInstance()) {
getTrustConfigBuilder().mergeFrom(value);
} else {
trustConfig_ = value;
}
} else {
trustConfigBuilder_.mergeFrom(value);
}
if (trustConfig_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Optional. The configuration of the broker truststore. If specified, clients
* can use mTLS for authentication.
* </pre>
*
* <code>
* .google.cloud.managedkafka.v1.TrustConfig trust_config = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder clearTrustConfig() {
bitField0_ = (bitField0_ & ~0x00000001);
trustConfig_ = null;
if (trustConfigBuilder_ != null) {
trustConfigBuilder_.dispose();
trustConfigBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The configuration of the broker truststore. If specified, clients
* can use mTLS for authentication.
* </pre>
*
* <code>
* .google.cloud.managedkafka.v1.TrustConfig trust_config = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public com.google.cloud.managedkafka.v1.TrustConfig.Builder getTrustConfigBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getTrustConfigFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Optional. The configuration of the broker truststore. If specified, clients
* can use mTLS for authentication.
* </pre>
*
* <code>
* .google.cloud.managedkafka.v1.TrustConfig trust_config = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public com.google.cloud.managedkafka.v1.TrustConfigOrBuilder getTrustConfigOrBuilder() {
if (trustConfigBuilder_ != null) {
return trustConfigBuilder_.getMessageOrBuilder();
} else {
return trustConfig_ == null
? com.google.cloud.managedkafka.v1.TrustConfig.getDefaultInstance()
: trustConfig_;
}
}
/**
*
*
* <pre>
* Optional. The configuration of the broker truststore. If specified, clients
* can use mTLS for authentication.
* </pre>
*
* <code>
* .google.cloud.managedkafka.v1.TrustConfig trust_config = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.managedkafka.v1.TrustConfig,
com.google.cloud.managedkafka.v1.TrustConfig.Builder,
com.google.cloud.managedkafka.v1.TrustConfigOrBuilder>
getTrustConfigFieldBuilder() {
if (trustConfigBuilder_ == null) {
trustConfigBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.managedkafka.v1.TrustConfig,
com.google.cloud.managedkafka.v1.TrustConfig.Builder,
com.google.cloud.managedkafka.v1.TrustConfigOrBuilder>(
getTrustConfig(), getParentForChildren(), isClean());
trustConfig_ = null;
}
return trustConfigBuilder_;
}
private java.lang.Object sslPrincipalMappingRules_ = "";
/**
*
*
* <pre>
* Optional. A list of rules for mapping from SSL principal names to
* short names. These are applied in order by Kafka.
* Refer to the Apache Kafka documentation for `ssl.principal.mapping.rules`
* for the precise formatting details and syntax.
* Example: "RULE:^CN=(.*?),OU=ServiceUsers.*$/$1@example.com/,DEFAULT"
*
* This is a static Kafka broker configuration. Setting or modifying this
* field will trigger a rolling restart of the Kafka brokers to apply
* the change. An empty string means no rules are applied (Kafka default).
* </pre>
*
* <code>string ssl_principal_mapping_rules = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The sslPrincipalMappingRules.
*/
public java.lang.String getSslPrincipalMappingRules() {
java.lang.Object ref = sslPrincipalMappingRules_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
sslPrincipalMappingRules_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. A list of rules for mapping from SSL principal names to
* short names. These are applied in order by Kafka.
* Refer to the Apache Kafka documentation for `ssl.principal.mapping.rules`
* for the precise formatting details and syntax.
* Example: "RULE:^CN=(.*?),OU=ServiceUsers.*$/$1@example.com/,DEFAULT"
*
* This is a static Kafka broker configuration. Setting or modifying this
* field will trigger a rolling restart of the Kafka brokers to apply
* the change. An empty string means no rules are applied (Kafka default).
* </pre>
*
* <code>string ssl_principal_mapping_rules = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The bytes for sslPrincipalMappingRules.
*/
public com.google.protobuf.ByteString getSslPrincipalMappingRulesBytes() {
java.lang.Object ref = sslPrincipalMappingRules_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
sslPrincipalMappingRules_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. A list of rules for mapping from SSL principal names to
* short names. These are applied in order by Kafka.
* Refer to the Apache Kafka documentation for `ssl.principal.mapping.rules`
* for the precise formatting details and syntax.
* Example: "RULE:^CN=(.*?),OU=ServiceUsers.*$/$1@example.com/,DEFAULT"
*
* This is a static Kafka broker configuration. Setting or modifying this
* field will trigger a rolling restart of the Kafka brokers to apply
* the change. An empty string means no rules are applied (Kafka default).
* </pre>
*
* <code>string ssl_principal_mapping_rules = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @param value The sslPrincipalMappingRules to set.
* @return This builder for chaining.
*/
public Builder setSslPrincipalMappingRules(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
sslPrincipalMappingRules_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A list of rules for mapping from SSL principal names to
* short names. These are applied in order by Kafka.
* Refer to the Apache Kafka documentation for `ssl.principal.mapping.rules`
* for the precise formatting details and syntax.
* Example: "RULE:^CN=(.*?),OU=ServiceUsers.*$/$1@example.com/,DEFAULT"
*
* This is a static Kafka broker configuration. Setting or modifying this
* field will trigger a rolling restart of the Kafka brokers to apply
* the change. An empty string means no rules are applied (Kafka default).
* </pre>
*
* <code>string ssl_principal_mapping_rules = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return This builder for chaining.
*/
public Builder clearSslPrincipalMappingRules() {
sslPrincipalMappingRules_ = getDefaultInstance().getSslPrincipalMappingRules();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A list of rules for mapping from SSL principal names to
* short names. These are applied in order by Kafka.
* Refer to the Apache Kafka documentation for `ssl.principal.mapping.rules`
* for the precise formatting details and syntax.
* Example: "RULE:^CN=(.*?),OU=ServiceUsers.*$/$1@example.com/,DEFAULT"
*
* This is a static Kafka broker configuration. Setting or modifying this
* field will trigger a rolling restart of the Kafka brokers to apply
* the change. An empty string means no rules are applied (Kafka default).
* </pre>
*
* <code>string ssl_principal_mapping_rules = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @param value The bytes for sslPrincipalMappingRules to set.
* @return This builder for chaining.
*/
public Builder setSslPrincipalMappingRulesBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
sslPrincipalMappingRules_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.managedkafka.v1.TlsConfig)
}
// @@protoc_insertion_point(class_scope:google.cloud.managedkafka.v1.TlsConfig)

// Shared immutable singleton returned by getDefaultInstance()/getDefaultInstanceForType().
private static final com.google.cloud.managedkafka.v1.TlsConfig DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.managedkafka.v1.TlsConfig();
}

public static com.google.cloud.managedkafka.v1.TlsConfig getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Stream parser used by the static parseFrom overloads; any parse failure is surfaced
// as InvalidProtocolBufferException carrying the partially-built message.
private static final com.google.protobuf.Parser<TlsConfig> PARSER =
    new com.google.protobuf.AbstractParser<TlsConfig>() {
      @java.lang.Override
      public TlsConfig parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<TlsConfig> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<TlsConfig> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.managedkafka.v1.TlsConfig getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
oracle/coherence | 34,794 | prj/coherence-concurrent/src/main/java/com/oracle/coherence/concurrent/executor/util/CronPattern.java | /*
* Copyright (c) 2016, 2021, Oracle and/or its affiliates.
*
* Licensed under the Universal Permissive License v 1.0 as shown at
* http://oss.oracle.com/licenses/upl.
*/
package com.oracle.coherence.concurrent.executor.util;
import com.oracle.coherence.concurrent.executor.function.Predicates;
import com.tangosol.util.function.Remote.Predicate;
import java.time.Instant;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.StringTokenizer;
import java.util.TimeZone;
/**
* <p>
* A UNIX crontab-like pattern is a string split in five space separated parts. Each part is intended as:
* </p>
* <ol>
* <li><strong>Minutes sub-pattern</strong>. During which minutes of the hour
* should the task be launched? Valid values range from 0 to 59.</li>
* <li><strong>Hours sub-pattern</strong>. During which hours of the day should
* the task be launched? Valid values range from 0 to 23.</li>
* <li><strong>Days of month sub-pattern</strong>. During which days of the
* month should the task be launched? Valid values range from 1 to 31. The
* special value L can be used to recognize the last day of month.</li>
* <li><strong>Months sub-pattern</strong>. During which months of the year
* should the task be launched? Valid values range from 1 (January) to 12
* (December), otherwise this sub-pattern allows the aliases "jan",
* "feb", "mar", "apr", "may",
* "jun", "jul", "aug", "sep",
* "oct", "nov" and "dec".</li>
* <li><strong>Days of week sub-pattern</strong>. During which days of the week
* should the task be launched? Valid values range from 0 (Sunday) to 6
* (Saturday), otherwise this sub-pattern allows the aliases "sun",
* "mon", "tue", "wed", "thu",
* "fri" and "sat".</li>
* </ol>
* <p>
* The star wildcard character is also admitted, indicating "every minute
* of the hour", "every hour of the day", "every day of the
* month", "every month of the year" and "every day of the
* week", according to the sub-pattern in which it is used.
* </p>
* <p>
* Once the scheduler is started, a task will be launched when all five parts of
* its scheduling pattern are true at the same time.
* </p>
* <p>
* Some examples:
* </p>
* <p>
* <strong>5 * * * *</strong><br>
* This pattern causes a task to be launched once every hour, at the begin of
* the fifth minute (00:05, 01:05, 02:05 etc.).
* </p>
* <p>
* <strong>* * * * *</strong><br>
* This pattern causes a task to be launched every minute.
* </p>
* <p>
* <strong>* 12 * * Mon</strong><br>
* This pattern causes a task to be launched every minute during the 12th hour
* of Monday.
* </p>
* <p>
* <strong>* 12 16 * Mon</strong><br>
* This pattern causes a task to be launched every minute during the 12th hour
* of Monday, 16th, but only if the day is the 16th of the month.
* </p>
* <p>
* Every sub-pattern can contain two or more comma separated values.
* </p>
* <p>
* <strong>59 11 * * 1,2,3,4,5</strong><br>
* This pattern causes a task to be launched at 11:59AM on Monday, Tuesday,
* Wednesday, Thursday and Friday.
* </p>
* <p>
* Values intervals are admitted and defined using the minus character.
* </p>
* <p>
* <strong>59 11 * * 1-5</strong><br>
* This pattern is equivalent to the previous one.
* </p>
* <p>
* The slash character can be used to identify step values within a range. It
* can be used both in the form <em>*/c</em> and <em>a-b/c</em>. The
* subpattern is matched every <em>c</em> values of the range
* <em>0,maxvalue</em> or <em>a-b</em>.
* </p>
* <p>
* <strong>*/5 * * * *</strong><br>
* This pattern causes a task to be launched every 5 minutes (0:00, 0:05, 0:10,
* 0:15 and so on).
* </p>
* <p>
* <strong>3-18/5 * * * *</strong><br>
* This pattern causes a task to be launched every 5 minutes starting from the
* third minute of the hour, up to the 18th (0:03, 0:08, 0:13, 0:18, 1:03, 1:08
* and so on).
* </p>
* <p>
* <strong>*/15 9-17 * * *</strong><br>
* This pattern causes a task to be launched every 15 minutes between the 9th
* and 17th hour of the day (9:00, 9:15, 9:30, 9:45 and so on... note that the
* last execution will be at 17:45).
* </p>
* <p>
* All the fresh described syntax rules can be used together.
* </p>
* <p>
* <strong>* 12 10-16/2 * *</strong><br>
* This pattern causes a task to be launched every minute during the 12th hour
* of the day, but only if the day is the 10th, the 12th, the 14th or the 16th
* of the month.
* </p>
* <p>
* <strong>* 12 1-15,17,20-25 * *</strong><br>
* This pattern causes a task to be launched every minute during the 12th hour
* of the day, but the day of the month must be between the 1st and the 15th,
* the 20th and the 25, or at least it must be the 17th.
* </p>
* <p>
* Finally it lets you combine more scheduling patterns into one, with the
* pipe character:
* </p>
* <p>
* <strong>0 5 * * *|8 10 * * *|22 17 * * *</strong><br>
* This pattern causes a task to be launched every day at 05:00, 10:08 and
* 17:22.
* </p>
* <p>
* Hourly
* <strong>5 * * * *</strong><br>
* This pattern causes a task to be launched at 5 minutes past every hour.
* </p>
* <p>
* Daily
* <strong>0 5 * * *</strong><br>
* This pattern causes a task to be launched every day at 05:00.
* </p>
* <p>
* Yearly
* <strong>0 5 1 1 *</strong><br>
* This pattern causes a task to be launched at 1st month, 1st day, 05:00 every year.
* </p>
* <p>
* Every 5 minutes
* <strong>*/5 * * * *</strong><br>
* This pattern causes a task to be launched every 5 minutes (asterisk followed by slash,
* followed by the 5 minute interval).
* </p>
*
* @author Adapted from the cron4j scheduler by Carlo Pelliccia
* @author lh, bo
* @since 21.12
*/
public class CronPattern
{
// ----- constructors ---------------------------------------------------
/**
 * Builds a CronPattern based on the provided input argument formatted as
 * a crontab-like string.  Multiple '|'-separated sub-patterns are accepted;
 * each sub-pattern must contain exactly five whitespace-separated fields
 * (minutes, hours, days of month, months, days of week).
 *
 * @param sPattern  the pattern as a crontab-like string
 *
 * @throws IllegalArgumentException if the supplied string is not a valid pattern
 */
public CronPattern(String sPattern) throws IllegalArgumentException
    {
    f_sPattern = sPattern;

    StringTokenizer st1 = new StringTokenizer(sPattern, "|");
    if (st1.countTokens() < 1)
        {
        throw new IllegalArgumentException("invalid pattern: \"" + sPattern + "\"");
        }

    while (st1.hasMoreTokens())
        {
        String          sLocalPattern = st1.nextToken();
        StringTokenizer st2           = new StringTokenizer(sLocalPattern, " \t");
        if (st2.countTokens() != 5)
            {
            throw new IllegalArgumentException("invalid pattern: \"" + sLocalPattern + "\"");
            }

        // the five fields arrive in fixed order: minutes, hours, days of month,
        // months, days of week
        m_listMinuteMatchers.add(
                parseField(sLocalPattern, "minutes", st2.nextToken(), MINUTE_VALUE_PARSER));
        m_listHourMatchers.add(
                parseField(sLocalPattern, "hours", st2.nextToken(), HOUR_VALUE_PARSER));
        m_listDayOfMonthMatchers.add(
                parseField(sLocalPattern, "days of month", st2.nextToken(), DAY_OF_MONTH_VALUE_PARSER));
        m_listMonthMatchers.add(
                parseField(sLocalPattern, "months", st2.nextToken(), MONTH_VALUE_PARSER));
        m_listDayOfWeekMatchers.add(
                parseField(sLocalPattern, "days of week", st2.nextToken(), DAY_OF_WEEK_VALUE_PARSER));

        m_cMatcherSize++;
        }
    }

/**
 * Parse a single crontab field into its matching {@link Predicate}, translating
 * any parse failure into an {@link IllegalArgumentException} whose message
 * identifies both the offending sub-pattern and the field being parsed.
 *
 * @param sLocalPattern  the full sub-pattern (used in the error message)
 * @param sField         the human-readable field name, e.g. "minutes"
 * @param sToken         the raw field token to parse
 * @param parser         the {@link ValueParser} for this field's value range
 *
 * @return the {@link Predicate} for the field
 *
 * @throws IllegalArgumentException if the field cannot be parsed
 */
private Predicate<?> parseField(String sLocalPattern, String sField, String sToken, ValueParser parser)
    {
    try
        {
        return buildPredicate(sToken, parser);
        }
    catch (Exception e)
        {
        throw new IllegalArgumentException("invalid pattern \""
                + sLocalPattern + "\". Error parsing " + sField + " field: "
                + e.getMessage() + ".");
        }
    }
// ----- public methods -------------------------------------------------
/**
 * Returns the next execution time in milliseconds from the crontab scheduling pattern, according to the
 * given time zone.
 * <p>
 * The search advances the smallest field that can move forward (minute, then hour,
 * then day, then month) while larger fields keep their current values; if nothing
 * can advance within the current year, the date is rolled into the next year.
 * <p>
 * NOTE(review): when several '|'-separated sub-patterns are present, the first
 * sub-pattern that produces a result wins rather than the earliest of all
 * candidates — confirm this is the intended semantics.
 *
 * @param timezone a time zone
 * @param cMillis the timestamp, as a UNIX-era millis value
 *
 * @return the next execute time
 */
public long getNextExecuteTime(TimeZone timezone, long cMillis)
    {
    // interpret the epoch-millis timestamp in the caller's zone so all field
    // comparisons happen in local time
    ZonedDateTime zdt = ZonedDateTime.ofInstant(Instant.ofEpochMilli(cMillis), timezone.toZoneId());
    int nMinute = zdt.getMinute();
    int nHour = zdt.getHour();
    int nDayOfMonth = zdt.getDayOfMonth();
    int nMonth = zdt.getMonth().getValue();
    // NOTE(review): DayOfWeek.getValue() is ISO-8601 (1=Monday .. 7=Sunday), while the
    // class javadoc documents cron-style 0 (Sunday) - 6 (Saturday); confirm that the
    // values produced by DAY_OF_WEEK_VALUE_PARSER use the same numbering.
    int nDayOfWeek = zdt.getDayOfWeek().getValue();
    int nYear = zdt.getYear();

    // one iteration per '|'-separated sub-pattern
    for (int i = 0; i < m_cMatcherSize; i++)
        {
        // 1) try to advance the minute within the current hour
        Predicate<?> minuteMatcher = m_listMinuteMatchers.get(i);
        int nextMinute = getNextMinute(nMinute, minuteMatcher);
        if (nextMinute > nMinute)
            {
            return zdt.withMinute(nextMinute).toInstant().toEpochMilli();
            }

        // 2) minute wrapped; try to advance the hour within the current day
        Predicate<?> hourMatcher = m_listHourMatchers.get(i);
        int nextHour = getNextHour(nHour, hourMatcher);
        if (nextHour > nHour)
            {
            return zdt.withMinute(nextMinute).withHour(nextHour).toInstant().toEpochMilli();
            }

        // 3) hour wrapped too; evaluate day-of-month and day-of-week constraints together
        Predicate<?> dayOfMonthMatcher = m_listDayOfMonthMatchers.get(i);
        Predicate<?> dayOfWeekMatcher = m_listDayOfWeekMatchers.get(i);
        boolean fDayOfMonthSet = false;
        int nNextDayOfMonth = 0;
        if (dayOfMonthMatcher instanceof Predicates.AlwaysPredicate
            && dayOfWeekMatcher instanceof Predicates.AlwaysPredicate
            || dayOfMonthMatcher instanceof IntArrayPredicate)
            {
            nNextDayOfMonth = getNextDayOfMonth(zdt, dayOfMonthMatcher);
            if (nNextDayOfMonth > nDayOfMonth)
                {
                if (dayOfWeekMatcher instanceof Predicates.AlwaysPredicate)
                    {
                    // day-of-week is unconstrained, so the day-of-month candidate is final
                    return zdt.withMinute(nextMinute).withHour(nextHour).withDayOfMonth(nNextDayOfMonth)
                        .toInstant().toEpochMilli();
                    }
                // remember the candidate; the day-of-week constraint may yield an earlier day
                fDayOfMonthSet = true;
                }
            }
        if (dayOfWeekMatcher instanceof IntArrayPredicate)
            {
            int nNextDayOfWeek = getNextDayOfWeek(nDayOfWeek, dayOfWeekMatcher);
            // days until the next matching weekday, wrapping around the week
            int cOffset = nNextDayOfWeek > nDayOfWeek
                ? nNextDayOfWeek - nDayOfWeek
                : nNextDayOfWeek + 7 - nDayOfWeek;
            if (fDayOfMonthSet)
                {
                // both constraints produced a day; take whichever comes first
                if (cOffset > (nNextDayOfMonth - nDayOfMonth))
                    {
                    cOffset = nNextDayOfMonth - nDayOfMonth;
                    }
                return zdt.withMinute(nextMinute).withHour(nextHour).plusDays(cOffset).toInstant().toEpochMilli();
                }
            else if (zdt.plusDays(cOffset).getDayOfMonth() > nDayOfMonth)
                {
                // the weekday candidate stays within the current month
                return zdt.withMinute(nextMinute).withHour(nextHour).plusDays(cOffset).toInstant().toEpochMilli();
                }
            // the weekday candidate crossed into the next month; keep the earlier of the
            // two day candidates and fall through to the month search below
            ZonedDateTime nextZdt = zdt.plusDays(cOffset);
            if (nNextDayOfMonth > 0)
                {
                int nextDay = nextZdt.getDayOfMonth();
                if (nextDay < nNextDayOfMonth)
                    {
                    nNextDayOfMonth = nextDay;
                    }
                }
            else
                {
                nNextDayOfMonth = nextZdt.getDayOfMonth();
                }
            }

        // 4) day wrapped; try to advance the month within the current year
        Predicate<?> monthMatcher = m_listMonthMatchers.get(i);
        int nNextMonth = getNextMonth(nMonth, monthMatcher);
        if (nNextMonth > nMonth)
            {
            return zdt.withMinute(nextMinute).withHour(nextHour).withDayOfMonth(nNextDayOfMonth)
                .withMonth(nNextMonth).toInstant().toEpochMilli();
            }

        // 5) everything wrapped; roll into the next year
        int nextYear = nYear + 1;
        zdt = zdt.withMinute(nextMinute).withHour(nextHour)
            .withDayOfMonth(nNextDayOfMonth).withMonth(nNextMonth).withYear(nextYear);
        }
    return zdt.toInstant().toEpochMilli();
    }
/**
 * Returns the next execution time in milliseconds after the given timestamp
 * (expressed as a UNIX-era millis value), evaluated in the system default
 * time zone.
 *
 * @param cMillis  the timestamp, as a UNIX-era millis value
 *
 * @return the next execution time in milliseconds
 */
public long getNextExecuteTime(long cMillis)
    {
    // delegate to the zone-aware overload using the JVM's default zone
    TimeZone tzDefault = TimeZone.getDefault();
    return getNextExecuteTime(tzDefault, cMillis);
    }
/**
 * Determine the minute at which the task should next run.
 *
 * @param nMinute    the current minute
 * @param predicate  the predicate used to locate the next matching minute
 *
 * @return the next minute to execute the task
 */
public int getNextMinute(int nMinute, Predicate<?> predicate)
    {
    if (!(predicate instanceof Predicates.AlwaysPredicate))
        {
        // an explicit minute list; delegate to the predicate
        return ((IntArrayPredicate) predicate).getNext(nMinute);
        }

    // wildcard pattern: simply the following minute, wrapping 59 back to 0
    return nMinute >= 59 ? 0 : nMinute + 1;
    }
/**
 * Determine the hour at which the task should next run.
 *
 * @param nHour      the current hour
 * @param predicate  the predicate used to locate the next matching hour
 *
 * @return the next hour to execute the task
 */
public int getNextHour(int nHour, Predicate<?> predicate)
    {
    if (!(predicate instanceof Predicates.AlwaysPredicate))
        {
        // an explicit hour list; delegate to the predicate
        return ((IntArrayPredicate) predicate).getNext(nHour);
        }

    // wildcard pattern: simply the following hour, wrapping 23 back to 0
    return nHour >= 23 ? 0 : nHour + 1;
    }
/**
 * Determine the day of the month on which the task should next run.
 *
 * @param zdt        the current ZonedDateTime
 * @param predicate  the predicate used to locate the next matching day of the month
 *
 * @return the next dayOfMonth to execute the task
 */
public int getNextDayOfMonth(ZonedDateTime zdt, Predicate<?> predicate)
    {
    if (!(predicate instanceof Predicates.AlwaysPredicate))
        {
        // an explicit day list; delegate to the predicate
        return ((IntArrayPredicate) predicate).getNext(zdt.getDayOfMonth());
        }

    // wildcard pattern: tomorrow's day-of-month (LocalDate handles month rollover)
    return zdt.toLocalDate().plusDays(1).getDayOfMonth();
    }
/**
 * Determine the day of the week (0 (Sunday) - 6 (Saturday)) on which the task
 * should next run.
 *
 * @param nDayOfWeek  the current dayOfWeek
 * @param predicate   the predicate used to locate the next matching day of the week
 *
 * @return the next dayOfWeek to execute the task
 */
public int getNextDayOfWeek(int nDayOfWeek, Predicate<?> predicate)
    {
    if (!(predicate instanceof Predicates.AlwaysPredicate))
        {
        // an explicit day list; delegate to the predicate
        return ((IntArrayPredicate) predicate).getNext(nDayOfWeek);
        }

    // wildcard pattern: simply the following day, wrapping the end of the week to 0
    return nDayOfWeek >= 6 ? 0 : nDayOfWeek + 1;
    }
/**
 * Determine the month in which the task should next run.
 *
 * @param nMonth     the current month
 * @param predicate  the predicate used to locate the next matching month
 *
 * @return the next month to execute the task
 */
public int getNextMonth(int nMonth, Predicate<?> predicate)
    {
    if (!(predicate instanceof Predicates.AlwaysPredicate))
        {
        // an explicit month list; delegate to the predicate
        return ((IntArrayPredicate) predicate).getNext(nMonth);
        }

    // wildcard pattern: the following month, wrapping December (12) back to January (1)
    return nMonth >= 12 ? 1 : nMonth + 1;
    }
// ----- Object methods -------------------------------------------------
/**
 * Returns the pattern as a string.
 *
 * @return the original crontab-like pattern exactly as supplied to the constructor
 */
@Override
public String toString()
    {
    return f_sPattern;
    }
// ----- helper methods -------------------------------------------------
/**
 * A Predicate utility builder: converts one crontab field into the
 * {@link Predicate} that matches its allowed values.
 *
 * @param sPattern  the pattern part for the Predicate creation
 * @param parser    the parser used to parse the values
 *
 * @return the requested {@link Predicate}
 *
 * @throws Exception if the supplied pattern part is not valid
 */
protected Predicate<?> buildPredicate(String sPattern, ValueParser parser)
        throws Exception
    {
    // a lone '*' matches every value of the field
    if (sPattern.length() == 1 && sPattern.charAt(0) == '*')
        {
        return Predicates.AlwaysPredicate.get();
        }

    // accumulate the union of all comma-separated elements, preserving
    // first-seen order and dropping duplicates
    List<Integer>   listAll = new ArrayList<>();
    StringTokenizer tokens  = new StringTokenizer(sPattern, ",");
    while (tokens.hasMoreTokens())
        {
        String        sElement = tokens.nextToken();
        List<Integer> listElement;
        try
            {
            listElement = parseListElement(sElement, parser);
            }
        catch (Exception e)
            {
            throw new Exception("invalid field \"" + sPattern
                    + "\", invalid element \"" + sElement + "\", "
                    + e.getMessage());
            }
        for (Integer value : listElement)
            {
            if (!listAll.contains(value))
                {
                listAll.add(value);
                }
            }
        }

    if (listAll.isEmpty())
        {
        throw new Exception("invalid field \"" + sPattern + "\"");
        }

    // day-of-month gets its own predicate type (supports the 'L' last-day semantics)
    return parser == DAY_OF_MONTH_VALUE_PARSER
            ? new DayOfMonthPredicate(listAll)
            : new IntArrayPredicate(listAll);
    }
/**
 * Parses an individual part/element of the crontab configuration.
 *
 * @param sElement the element string
 * @param parser the parser used to parse the values
 *
 * @return a {@link List} of {@link Integer integers} representing the allowed values
 *
 * @throws Exception if the supplied pattern part is not valid
 */
protected List<Integer> parseListElement(String sElement, ValueParser parser)
        throws Exception
    {
    // an element is "range" or "range/step"
    StringTokenizer st = new StringTokenizer(sElement, "/");
    int cTokens = st.countTokens();
    if (cTokens < 1 || cTokens > 2)
        {
        throw new Exception("syntax error");
        }

    List<Integer> listRange;
    try
        {
        listRange = parseRange(st.nextToken(), parser);
        }
    catch (Exception e)
        {
        throw new Exception("invalid range, " + e.getMessage());
        }

    if (cTokens == 1)
        {
        return listRange;
        }

    // a "/step" suffix was supplied: keep every step-th value of the range
    String sDivisor = st.nextToken();
    int nStep;
    try
        {
        nStep = Integer.parseInt(sDivisor);
        }
    catch (NumberFormatException e)
        {
        throw new Exception("invalid divisor \"" + sDivisor + "\"");
        }
    if (nStep < 1)
        {
        throw new Exception("non positive divisor \"" + nStep + "\"");
        }

    List<Integer> listStepped = new ArrayList<>();
    for (int i = 0; i < listRange.size(); i += nStep)
        {
        listStepped.add(listRange.get(i));
        }
    return listStepped;
    }
/**
 * Parses a range of values.
 *
 * @param sRange the range string
 * @param parser the parser used to parse the values
 *
 * @return a {@link List} of {@link Integer integers} representing the allowed values
 *
 * @throws Exception if the supplied pattern part is not valid
 */
protected List<Integer> parseRange(String sRange, ValueParser parser)
        throws Exception
    {
    List<Integer> listResult = new ArrayList<>();

    // "*" expands to the parser's whole domain
    if (sRange.length() == 1 && sRange.charAt(0) == '*')
        {
        for (int i = parser.getMinValue(), cMax = parser.getMaxValue(); i <= cMax; i++)
            {
            listResult.add(i);
            }
        return listResult;
        }

    // a range is either "v" or "v1-v2"
    StringTokenizer st = new StringTokenizer(sRange, "-");
    int cTokens = st.countTokens();
    if (cTokens < 1 || cTokens > 2)
        {
        throw new Exception("syntax error");
        }

    String sFrom = st.nextToken();
    int nFrom;
    try
        {
        nFrom = parser.parse(sFrom);
        }
    catch (Exception e)
        {
        throw new Exception("invalid value \"" + sFrom + "\", "
                + e.getMessage());
        }

    if (cTokens == 1)
        {
        // a single value
        listResult.add(nFrom);
        return listResult;
        }

    String sTo = st.nextToken();
    int nTo;
    try
        {
        nTo = parser.parse(sTo);
        }
    catch (Exception e)
        {
        throw new Exception("invalid value \"" + sTo + "\", "
                + e.getMessage());
        }

    if (nFrom < nTo)
        {
        // ordinary ascending range, both ends inclusive
        for (int i = nFrom; i <= nTo; i++)
            {
            listResult.add(i);
            }
        }
    else if (nFrom > nTo)
        {
        // inverted range wraps through the end of the domain (e.g. "fri-mon")
        for (int i = nFrom, cMax = parser.getMaxValue(); i <= cMax; i++)
            {
            listResult.add(i);
            }
        for (int i = parser.getMinValue(); i <= nTo; i++)
            {
            listResult.add(i);
            }
        }
    else
        {
        // degenerate range: both ends equal
        listResult.add(nFrom);
        }
    return listResult;
    }
/**
 * This utility method changes an alias to an int value.
 *
 * @param sValue the value
 * @param asAliases the aliases list
 * @param cOffset the offset applied to the aliases list indices
 *
 * @return the parsed value
 *
 * @throws Exception if the expressed values doesn't match any alias
 */
protected static int parseAlias(String sValue, String[] asAliases, int cOffset)
        throws Exception
    {
    // linear scan: alias lists are tiny (7 or 12 entries)
    int iAlias = 0;
    for (String sAlias : asAliases)
        {
        if (sAlias.equalsIgnoreCase(sValue))
            {
            return cOffset + iAlias;
            }
        iAlias++;
        }
    throw new Exception("invalid alias \"" + sValue + "\"");
    }
// ----- inner interface: ValueParser -----------------------------------
/**
 * Definition for a value parser.
 * <p>
 * A {@code ValueParser} converts one textual token of a crontab field into
 * an integer and defines the inclusive range of values it accepts.
 */
protected interface ValueParser
{
/**
 * Attempts to parse a value.
 *
 * @param sValue the value
 *
 * @return the parsed value
 *
 * @throws Exception if the value can't be parsed
 */
int parse(String sValue)
throws Exception;
/**
 * Returns the minimum value accepted by the parser (inclusive).
 *
 * @return The minimum value accepted by the parser
 */
int getMinValue();
/**
 * Returns the maximum value accepted by the parser (inclusive).
 *
 * @return The maximum value accepted by the parser
 */
int getMaxValue();
}
// ----- inner class: SimpleValueParser ---------------------------------
/**
 * A simple value parser.
 * <p>
 * Accepts plain integer tokens within a fixed inclusive [min, max] interval.
 */
private static class SimpleValueParser
        implements ValueParser
    {
    // ----- constructors -----------------------------------------------

    /**
     * Builds the value parser.
     *
     * @param minValue  the minimum allowed value
     * @param nMaxValue the maximum allowed value
     */
    public SimpleValueParser(int minValue, int nMaxValue)
        {
        m_nMinValue = minValue;
        m_nMaxValue = nMaxValue;
        }

    // ----- ValueParser interface --------------------------------------

    @Override
    public int parse(String sValue) throws Exception
        {
        final int nParsed;
        try
            {
            nParsed = Integer.parseInt(sValue);
            }
        catch (NumberFormatException e)
            {
            throw new Exception("invalid integer value");
            }
        if (nParsed < m_nMinValue || nParsed > m_nMaxValue)
            {
            throw new Exception("value out of range");
            }
        return nParsed;
        }

    @Override
    public int getMinValue()
        {
        return m_nMinValue;
        }

    @Override
    public int getMaxValue()
        {
        return m_nMaxValue;
        }

    // ----- data members -----------------------------------------------

    /**
     * The minimum allowed value (inclusive).
     */
    protected int m_nMinValue;

    /**
     * The maximum allowed value (inclusive).
     */
    protected int m_nMaxValue;
    }
// ----- inner class: MinuteValueParser ---------------------------------
/**
 * The minutes value parser.
 * <p>
 * Accepts integer values in the inclusive range 0 - 59.
 */
private static class MinuteValueParser
extends SimpleValueParser
{
// ----- constructors -----------------------------------------------
/**
 * Builds the value parser.
 */
public MinuteValueParser()
{
super(0, 59);
}
}
// ----- inner class: HourValueParser -----------------------------------
/**
 * The hours value parser.
 * <p>
 * Accepts integer values in the inclusive range 0 - 23.
 */
private static class HourValueParser
extends SimpleValueParser
{
// ----- constructors -----------------------------------------------
/**
 * Builds the value parser.
 */
public HourValueParser()
{
super(0, 23);
}
}
// ----- inner class: DayOfMonthValueParser -----------------------------
/**
* The days of month value parser.
*/
private static class DayOfMonthValueParser
extends SimpleValueParser
{
// ----- constructors -----------------------------------------------
/**
* Builds the value parser.
*/
public DayOfMonthValueParser()
{
super(1, 31);
}
// ----- methods from SimpleValueParser -----------------------------
/**
* Added to support last-day-of-month.
*
* @param sValue the value to be parsed
*
* @return the integer day of the month or 32 for last day of the month
*
* @throws Exception if the input value is invalid
*/
public int parse(String sValue)
throws Exception
{
if (sValue.equalsIgnoreCase("L"))
{
return 32;
}
else
{
return super.parse(sValue);
}
}
}
// ----- inner class: MonthValueParser ----------------------------------
/**
* The value parser for the months field.
*/
private static class MonthValueParser
extends SimpleValueParser
{
// ----- constructors -----------------------------------------------
/**
* Builds the months value parser.
*/
public MonthValueParser()
{
super(1, 12);
}
// ----- SimpleValueParser methods ----------------------------------
/**
* Parses the given string value as a calendar month.
*
* @param sValue the value to parse
*
* @return the parsed result
*
* @throws Exception if the argument can't be parsed
*/
public int parse(String sValue) throws Exception
{
try
{
// try as a simple value
return super.parse(sValue);
}
catch (Exception e)
{
// try as an alias
return parseAlias(sValue, ALIASES, 1);
}
}
// ----- constants --------------------------------------------------
/**
* Months aliases.
*/
private static final String[] ALIASES = {"jan", "feb", "mar", "apr", "may",
"jun", "jul", "aug", "sep", "oct", "nov", "dec"};
}
// ----- inner class: DayOfWeekValueParser ------------------------------
/**
* The value parser for the months field.
*/
private static class DayOfWeekValueParser
extends SimpleValueParser
{
// ----- constructors -----------------------------------------------
/**
* Builds the months value parser.
*/
public DayOfWeekValueParser()
{
super(0, 7);
}
/**
* Parses the given string value as a day of the week.
*
* @param sValue the value to parse
*
* @return the parsed result
*
* @throws Exception if the argument can't be parsed
*/
public int parse(String sValue) throws Exception
{
try
{
// try as a simple value
return super.parse(sValue) % 7;
}
catch (Exception e)
{
// try as an alias
return parseAlias(sValue, ALIASES, 0);
}
}
// ----- constants --------------------------------------------------
/**
* Days of week aliases.
*/
private static final String[] ALIASES = {"sun", "mon", "tue", "wed", "thu", "fri", "sat"};
}
// ----- constants --------------------------------------------------
/**
 * The parser for the minute values (0 - 59).
 */
protected static final ValueParser MINUTE_VALUE_PARSER = new MinuteValueParser();
/**
 * The parser for the hour values (0 - 23).
 */
protected static final ValueParser HOUR_VALUE_PARSER = new HourValueParser();
/**
 * The parser for the day of month values (1 - 31, plus "L" for last day).
 */
protected static final ValueParser DAY_OF_MONTH_VALUE_PARSER = new DayOfMonthValueParser();
/**
 * The parser for the month values (1 - 12 or "jan" - "dec").
 */
protected static final ValueParser MONTH_VALUE_PARSER = new MonthValueParser();
/**
 * The parser for the day of week values (0 - 7 or "sun" - "sat").
 */
protected static final ValueParser DAY_OF_WEEK_VALUE_PARSER = new DayOfWeekValueParser();
// ----- data members -----------------------------------------------
/**
 * The pattern as a string; immutable, returned verbatim by {@link #toString()}.
 */
protected final String f_sPattern;
/**
 * The Predicate list for the "minute" field.
 */
protected List<Predicate<?>> m_listMinuteMatchers = new ArrayList<>();
/**
 * The Predicate list for the "hour" field.
 */
protected List<Predicate<?>> m_listHourMatchers = new ArrayList<>();
/**
 * The Predicate list for the "day of month" field.
 */
protected List<Predicate<?>> m_listDayOfMonthMatchers = new ArrayList<>();
/**
 * The Predicate list for the "month" field.
 */
protected List<Predicate<?>> m_listMonthMatchers = new ArrayList<>();
/**
 * The Predicate list for the "day of week" field.
 */
protected List<Predicate<?>> m_listDayOfWeekMatchers = new ArrayList<>();
/**
 * How many predicate groups in this pattern?
 */
protected int m_cMatcherSize = 0;
}
|
googleapis/google-cloud-java | 34,957 | java-chronicle/proto-google-cloud-chronicle-v1/src/main/java/com/google/cloud/chronicle/v1/UpdateWatchlistRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/chronicle/v1/entity.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.chronicle.v1;
/**
*
*
* <pre>
* Request message for updating watchlist.
* </pre>
*
* Protobuf type {@code google.cloud.chronicle.v1.UpdateWatchlistRequest}
*/
public final class UpdateWatchlistRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.chronicle.v1.UpdateWatchlistRequest)
UpdateWatchlistRequestOrBuilder {
private static final long serialVersionUID = 0L;

// NOTE(review): this file is protoc-generated ("DO NOT EDIT") — change the
// .proto definition and regenerate instead of hand-editing behavior here.

// Use UpdateWatchlistRequest.newBuilder() to construct.
private UpdateWatchlistRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateWatchlistRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateWatchlistRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.chronicle.v1.EntityProto
.internal_static_google_cloud_chronicle_v1_UpdateWatchlistRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.chronicle.v1.EntityProto
.internal_static_google_cloud_chronicle_v1_UpdateWatchlistRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.chronicle.v1.UpdateWatchlistRequest.class,
com.google.cloud.chronicle.v1.UpdateWatchlistRequest.Builder.class);
}
// Presence bits: bit 0 = watchlist, bit 1 = update_mask (see has* methods).
private int bitField0_;
public static final int WATCHLIST_FIELD_NUMBER = 1;
private com.google.cloud.chronicle.v1.Watchlist watchlist_;
/**
 *
 *
 * <pre>
 * Required. The watchlist to update.
 *
 * The watchlist's `name` field is used to identify the watchlist to update.
 * Format:
 * `projects/{project}/locations/{location}/instances/{instance}/watchlists/{watchlist}`
 * </pre>
 *
 * <code>
 * .google.cloud.chronicle.v1.Watchlist watchlist = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return Whether the watchlist field is set.
 */
@java.lang.Override
public boolean hasWatchlist() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
 *
 *
 * <pre>
 * Required. The watchlist to update.
 *
 * The watchlist's `name` field is used to identify the watchlist to update.
 * Format:
 * `projects/{project}/locations/{location}/instances/{instance}/watchlists/{watchlist}`
 * </pre>
 *
 * <code>
 * .google.cloud.chronicle.v1.Watchlist watchlist = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return The watchlist.
 */
@java.lang.Override
public com.google.cloud.chronicle.v1.Watchlist getWatchlist() {
// never returns null: falls back to the default instance when unset
return watchlist_ == null
? com.google.cloud.chronicle.v1.Watchlist.getDefaultInstance()
: watchlist_;
}
/**
 *
 *
 * <pre>
 * Required. The watchlist to update.
 *
 * The watchlist's `name` field is used to identify the watchlist to update.
 * Format:
 * `projects/{project}/locations/{location}/instances/{instance}/watchlists/{watchlist}`
 * </pre>
 *
 * <code>
 * .google.cloud.chronicle.v1.Watchlist watchlist = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
@java.lang.Override
public com.google.cloud.chronicle.v1.WatchlistOrBuilder getWatchlistOrBuilder() {
return watchlist_ == null
? com.google.cloud.chronicle.v1.Watchlist.getDefaultInstance()
: watchlist_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
 *
 *
 * <pre>
 * Optional. The list of fields to update.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 *
 * @return Whether the updateMask field is set.
 */
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
 *
 *
 * <pre>
 * Optional. The list of fields to update.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 *
 * @return The updateMask.
 */
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
// never returns null: falls back to the default instance when unset
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
 *
 *
 * <pre>
 * Optional. The list of fields to update.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 */
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
// Memoized init check: -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
// only set fields are written, keyed by their presence bit
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getWatchlist());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getUpdateMask());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
// -1 means "not yet computed"; the size is cached after the first call
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getWatchlist());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.chronicle.v1.UpdateWatchlistRequest)) {
return super.equals(obj);
}
com.google.cloud.chronicle.v1.UpdateWatchlistRequest other =
(com.google.cloud.chronicle.v1.UpdateWatchlistRequest) obj;
// field presence must match before values are compared
if (hasWatchlist() != other.hasWatchlist()) return false;
if (hasWatchlist()) {
if (!getWatchlist().equals(other.getWatchlist())) return false;
}
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
// 0 doubles as the "not yet computed" sentinel for the cached hash
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasWatchlist()) {
hash = (37 * hash) + WATCHLIST_FIELD_NUMBER;
hash = (53 * hash) + getWatchlist().hashCode();
}
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parseFrom family: one overload per input source
// (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with
// and without an ExtensionRegistryLite; all delegate to PARSER.
public static com.google.cloud.chronicle.v1.UpdateWatchlistRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.chronicle.v1.UpdateWatchlistRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.chronicle.v1.UpdateWatchlistRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.chronicle.v1.UpdateWatchlistRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.chronicle.v1.UpdateWatchlistRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.chronicle.v1.UpdateWatchlistRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.chronicle.v1.UpdateWatchlistRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.chronicle.v1.UpdateWatchlistRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message bytes.
public static com.google.cloud.chronicle.v1.UpdateWatchlistRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.chronicle.v1.UpdateWatchlistRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.chronicle.v1.UpdateWatchlistRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.chronicle.v1.UpdateWatchlistRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Creates a builder pre-populated with the given message's fields.
public static Builder newBuilder(com.google.cloud.chronicle.v1.UpdateWatchlistRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// the default instance yields a fresh empty builder; anything else is copied
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for updating watchlist.
* </pre>
*
* Protobuf type {@code google.cloud.chronicle.v1.UpdateWatchlistRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.chronicle.v1.UpdateWatchlistRequest)
com.google.cloud.chronicle.v1.UpdateWatchlistRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.chronicle.v1.EntityProto
.internal_static_google_cloud_chronicle_v1_UpdateWatchlistRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.chronicle.v1.EntityProto
.internal_static_google_cloud_chronicle_v1_UpdateWatchlistRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.chronicle.v1.UpdateWatchlistRequest.class,
com.google.cloud.chronicle.v1.UpdateWatchlistRequest.Builder.class);
}
// Construct using com.google.cloud.chronicle.v1.UpdateWatchlistRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
// eagerly creates nested field builders when the runtime requests it
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getWatchlistFieldBuilder();
getUpdateMaskFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
watchlist_ = null;
if (watchlistBuilder_ != null) {
watchlistBuilder_.dispose();
watchlistBuilder_ = null;
}
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.chronicle.v1.EntityProto
.internal_static_google_cloud_chronicle_v1_UpdateWatchlistRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.chronicle.v1.UpdateWatchlistRequest getDefaultInstanceForType() {
return com.google.cloud.chronicle.v1.UpdateWatchlistRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.chronicle.v1.UpdateWatchlistRequest build() {
com.google.cloud.chronicle.v1.UpdateWatchlistRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.chronicle.v1.UpdateWatchlistRequest buildPartial() {
com.google.cloud.chronicle.v1.UpdateWatchlistRequest result =
new com.google.cloud.chronicle.v1.UpdateWatchlistRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.chronicle.v1.UpdateWatchlistRequest result) {
// copies only the fields whose presence bits are set, preferring the
// nested builder's output over the raw field when one exists
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.watchlist_ = watchlistBuilder_ == null ? watchlist_ : watchlistBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
// The following reflective field mutators simply delegate to the base class.
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
// dispatch to the typed overload when possible
if (other instanceof com.google.cloud.chronicle.v1.UpdateWatchlistRequest) {
return mergeFrom((com.google.cloud.chronicle.v1.UpdateWatchlistRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.chronicle.v1.UpdateWatchlistRequest other) {
// merging the default instance is a no-op
if (other == com.google.cloud.chronicle.v1.UpdateWatchlistRequest.getDefaultInstance())
return this;
if (other.hasWatchlist()) {
mergeWatchlist(other.getWatchlist());
}
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
// wire tags: 10 = field 1 (watchlist), 18 = field 2 (update_mask),
// 0 = end of stream
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getWatchlistFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.cloud.chronicle.v1.Watchlist watchlist_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.chronicle.v1.Watchlist,
com.google.cloud.chronicle.v1.Watchlist.Builder,
com.google.cloud.chronicle.v1.WatchlistOrBuilder>
watchlistBuilder_;
/**
*
*
* <pre>
* Required. The watchlist to update.
*
* The watchlist's `name` field is used to identify the watchlist to update.
* Format:
* `projects/{project}/locations/{location}/instances/{instance}/watchlists/{watchlist}`
* </pre>
*
* <code>
* .google.cloud.chronicle.v1.Watchlist watchlist = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the watchlist field is set.
*/
public boolean hasWatchlist() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The watchlist to update.
*
* The watchlist's `name` field is used to identify the watchlist to update.
* Format:
* `projects/{project}/locations/{location}/instances/{instance}/watchlists/{watchlist}`
* </pre>
*
* <code>
* .google.cloud.chronicle.v1.Watchlist watchlist = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The watchlist.
*/
public com.google.cloud.chronicle.v1.Watchlist getWatchlist() {
if (watchlistBuilder_ == null) {
return watchlist_ == null
? com.google.cloud.chronicle.v1.Watchlist.getDefaultInstance()
: watchlist_;
} else {
return watchlistBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The watchlist to update.
*
* The watchlist's `name` field is used to identify the watchlist to update.
* Format:
* `projects/{project}/locations/{location}/instances/{instance}/watchlists/{watchlist}`
* </pre>
*
* <code>
* .google.cloud.chronicle.v1.Watchlist watchlist = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setWatchlist(com.google.cloud.chronicle.v1.Watchlist value) {
if (watchlistBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
watchlist_ = value;
} else {
watchlistBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The watchlist to update.
*
* The watchlist's `name` field is used to identify the watchlist to update.
* Format:
* `projects/{project}/locations/{location}/instances/{instance}/watchlists/{watchlist}`
* </pre>
*
* <code>
* .google.cloud.chronicle.v1.Watchlist watchlist = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setWatchlist(com.google.cloud.chronicle.v1.Watchlist.Builder builderForValue) {
if (watchlistBuilder_ == null) {
watchlist_ = builderForValue.build();
} else {
watchlistBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The watchlist to update.
*
* The watchlist's `name` field is used to identify the watchlist to update.
* Format:
* `projects/{project}/locations/{location}/instances/{instance}/watchlists/{watchlist}`
* </pre>
*
* <code>
* .google.cloud.chronicle.v1.Watchlist watchlist = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeWatchlist(com.google.cloud.chronicle.v1.Watchlist value) {
if (watchlistBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& watchlist_ != null
&& watchlist_ != com.google.cloud.chronicle.v1.Watchlist.getDefaultInstance()) {
getWatchlistBuilder().mergeFrom(value);
} else {
watchlist_ = value;
}
} else {
watchlistBuilder_.mergeFrom(value);
}
if (watchlist_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
 *
 *
 * <pre>
 * Required. The watchlist to update.
 *
 * The watchlist's `name` field is used to identify the watchlist to update.
 * Format:
 * `projects/{project}/locations/{location}/instances/{instance}/watchlists/{watchlist}`
 * </pre>
 *
 * <code>
 * .google.cloud.chronicle.v1.Watchlist watchlist = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder clearWatchlist() {
  bitField0_ = (bitField0_ & ~0x00000001); // drop the has-bit for watchlist
  watchlist_ = null;
  // Release the nested builder too, so both representations are reset.
  if (watchlistBuilder_ != null) {
    watchlistBuilder_.dispose();
    watchlistBuilder_ = null;
  }
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Required. The watchlist to update.
 *
 * The watchlist's `name` field is used to identify the watchlist to update.
 * Format:
 * `projects/{project}/locations/{location}/instances/{instance}/watchlists/{watchlist}`
 * </pre>
 *
 * <code>
 * .google.cloud.chronicle.v1.Watchlist watchlist = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public com.google.cloud.chronicle.v1.Watchlist.Builder getWatchlistBuilder() {
  // Handing out a mutable builder implies the field will be modified, so the
  // has-bit is set and the change is signalled up front.
  bitField0_ |= 0x00000001;
  onChanged();
  return getWatchlistFieldBuilder().getBuilder();
}
/**
 *
 *
 * <pre>
 * Required. The watchlist to update.
 *
 * The watchlist's `name` field is used to identify the watchlist to update.
 * Format:
 * `projects/{project}/locations/{location}/instances/{instance}/watchlists/{watchlist}`
 * </pre>
 *
 * <code>
 * .google.cloud.chronicle.v1.Watchlist watchlist = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public com.google.cloud.chronicle.v1.WatchlistOrBuilder getWatchlistOrBuilder() {
  if (watchlistBuilder_ != null) {
    return watchlistBuilder_.getMessageOrBuilder();
  } else {
    // Never returns null: an unset field reads as the default instance.
    return watchlist_ == null
        ? com.google.cloud.chronicle.v1.Watchlist.getDefaultInstance()
        : watchlist_;
  }
}
/**
 *
 *
 * <pre>
 * Required. The watchlist to update.
 *
 * The watchlist's `name` field is used to identify the watchlist to update.
 * Format:
 * `projects/{project}/locations/{location}/instances/{instance}/watchlists/{watchlist}`
 * </pre>
 *
 * <code>
 * .google.cloud.chronicle.v1.Watchlist watchlist = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.cloud.chronicle.v1.Watchlist,
        com.google.cloud.chronicle.v1.Watchlist.Builder,
        com.google.cloud.chronicle.v1.WatchlistOrBuilder>
    getWatchlistFieldBuilder() {
  // Lazily creates the nested field builder, seeding it with the current value.
  if (watchlistBuilder_ == null) {
    watchlistBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.chronicle.v1.Watchlist,
            com.google.cloud.chronicle.v1.Watchlist.Builder,
            com.google.cloud.chronicle.v1.WatchlistOrBuilder>(
            getWatchlist(), getParentForChildren(), isClean());
    // Ownership moves into the builder; drop the plain reference so the two
    // representations never coexist.
    watchlist_ = null;
  }
  return watchlistBuilder_;
}
// The update_mask field (field number 2) is held either directly in
// updateMask_ or, once a nested builder has been requested, inside
// updateMaskBuilder_ (see getUpdateMaskFieldBuilder()).
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.FieldMask,
        com.google.protobuf.FieldMask.Builder,
        com.google.protobuf.FieldMaskOrBuilder>
    updateMaskBuilder_;
/**
 *
 *
 * <pre>
 * Optional. The list of fields to update.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 *
 * @return Whether the updateMask field is set.
 */
public boolean hasUpdateMask() {
  // Bit 0x2 of bitField0_ tracks presence of update_mask.
  return ((bitField0_ & 0x00000002) != 0);
}
/**
 *
 *
 * <pre>
 * Optional. The list of fields to update.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 *
 * @return The updateMask.
 */
public com.google.protobuf.FieldMask getUpdateMask() {
  // Reads from whichever representation currently owns the field; an unset
  // field reads as the default instance, never null.
  if (updateMaskBuilder_ == null) {
    return updateMask_ == null
        ? com.google.protobuf.FieldMask.getDefaultInstance()
        : updateMask_;
  } else {
    return updateMaskBuilder_.getMessage();
  }
}
/**
 *
 *
 * <pre>
 * Optional. The list of fields to update.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 */
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
  if (updateMaskBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    updateMask_ = value;
  } else {
    updateMaskBuilder_.setMessage(value);
  }
  bitField0_ |= 0x00000002; // bit 0x2 records that update_mask is set
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Optional. The list of fields to update.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 */
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
  // Builder overload: materialize the message once and store it through
  // whichever representation currently owns the field.
  if (updateMaskBuilder_ == null) {
    updateMask_ = builderForValue.build();
  } else {
    updateMaskBuilder_.setMessage(builderForValue.build());
  }
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Optional. The list of fields to update.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 */
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
  if (updateMaskBuilder_ == null) {
    // Merge into the existing mask only when one is already set and is not
    // the default instance; otherwise simply adopt the incoming value.
    if (((bitField0_ & 0x00000002) != 0)
        && updateMask_ != null
        && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
      getUpdateMaskBuilder().mergeFrom(value);
    } else {
      updateMask_ = value;
    }
  } else {
    updateMaskBuilder_.mergeFrom(value);
  }
  // Flag presence only while the plain message form holds a value; after
  // getUpdateMaskBuilder() above, updateMask_ is null and the nested builder
  // tracks the field's state instead.
  if (updateMask_ != null) {
    bitField0_ |= 0x00000002;
    onChanged();
  }
  return this;
}
/**
 *
 *
 * <pre>
 * Optional. The list of fields to update.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 */
public Builder clearUpdateMask() {
  bitField0_ = (bitField0_ & ~0x00000002); // drop the has-bit for update_mask
  updateMask_ = null;
  // Release the nested builder too, so both representations are reset.
  if (updateMaskBuilder_ != null) {
    updateMaskBuilder_.dispose();
    updateMaskBuilder_ = null;
  }
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Optional. The list of fields to update.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 */
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
  // Handing out a mutable builder implies the field will be modified, so the
  // has-bit is set and the change is signalled up front.
  bitField0_ |= 0x00000002;
  onChanged();
  return getUpdateMaskFieldBuilder().getBuilder();
}
/**
 *
 *
 * <pre>
 * Optional. The list of fields to update.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 */
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
  if (updateMaskBuilder_ != null) {
    return updateMaskBuilder_.getMessageOrBuilder();
  } else {
    // Never returns null: an unset field reads as the default instance.
    return updateMask_ == null
        ? com.google.protobuf.FieldMask.getDefaultInstance()
        : updateMask_;
  }
}
/**
 *
 *
 * <pre>
 * Optional. The list of fields to update.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 */
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.FieldMask,
        com.google.protobuf.FieldMask.Builder,
        com.google.protobuf.FieldMaskOrBuilder>
    getUpdateMaskFieldBuilder() {
  // Lazily creates the nested field builder, seeding it with the current value.
  if (updateMaskBuilder_ == null) {
    updateMaskBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>(
            getUpdateMask(), getParentForChildren(), isClean());
    // Ownership moves into the builder; drop the plain reference so the two
    // representations never coexist.
    updateMask_ = null;
  }
  return updateMaskBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  // Delegates unknown-field handling entirely to the generated superclass.
  return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  // Delegates unknown-field handling entirely to the generated superclass.
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.chronicle.v1.UpdateWatchlistRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.chronicle.v1.UpdateWatchlistRequest)
// Singleton default instance; unset message fields of this type resolve to it.
private static final com.google.cloud.chronicle.v1.UpdateWatchlistRequest DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.chronicle.v1.UpdateWatchlistRequest();
}
/** Returns the shared immutable default instance of this message type. */
public static com.google.cloud.chronicle.v1.UpdateWatchlistRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Wire-format parser for this message type; exposed via parser() below.
private static final com.google.protobuf.Parser<UpdateWatchlistRequest> PARSER =
    new com.google.protobuf.AbstractParser<UpdateWatchlistRequest>() {
      @java.lang.Override
      public UpdateWatchlistRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Attach whatever was successfully parsed so callers can inspect it.
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failures in the protobuf exception type declared here.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
/** Returns the shared parser for this message type. */
public static com.google.protobuf.Parser<UpdateWatchlistRequest> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<UpdateWatchlistRequest> getParserForType() {
  // Instance accessor required by the Message interface; same shared PARSER.
  return PARSER;
}
@java.lang.Override
public com.google.cloud.chronicle.v1.UpdateWatchlistRequest getDefaultInstanceForType() {
  // Instance accessor required by the Message interface; same shared default.
  return DEFAULT_INSTANCE;
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.shopping.merchant.inventories.v1;
import static io.grpc.MethodDescriptor.generateFullMethodName;
/**
*
*
* <pre>
* Service to manage regional inventory for products. There is also separate
* `regions` resource and API to manage regions definitions.
* </pre>
*/
@javax.annotation.Generated(
value = "by gRPC proto compiler",
comments = "Source: google/shopping/merchant/inventories/v1/regionalinventory.proto")
@io.grpc.stub.annotations.GrpcGenerated
public final class RegionalInventoryServiceGrpc {
// Not instantiable: this class only hosts static descriptors and stub factories.
private RegionalInventoryServiceGrpc() {}

public static final java.lang.String SERVICE_NAME =
    "google.shopping.merchant.inventories.v1.RegionalInventoryService";

// Static method descriptors that strictly reflect the proto.
// Lazily initialized (volatile field, double-checked) in the accessor below.
private static volatile io.grpc.MethodDescriptor<
        com.google.shopping.merchant.inventories.v1.ListRegionalInventoriesRequest,
        com.google.shopping.merchant.inventories.v1.ListRegionalInventoriesResponse>
    getListRegionalInventoriesMethod;
@io.grpc.stub.annotations.RpcMethod(
    fullMethodName = SERVICE_NAME + '/' + "ListRegionalInventories",
    requestType =
        com.google.shopping.merchant.inventories.v1.ListRegionalInventoriesRequest.class,
    responseType =
        com.google.shopping.merchant.inventories.v1.ListRegionalInventoriesResponse.class,
    methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
public static io.grpc.MethodDescriptor<
        com.google.shopping.merchant.inventories.v1.ListRegionalInventoriesRequest,
        com.google.shopping.merchant.inventories.v1.ListRegionalInventoriesResponse>
    getListRegionalInventoriesMethod() {
  // Lazily builds and caches the unary ListRegionalInventories descriptor:
  // volatile read first, then a synchronized re-check before construction
  // (double-checked locking).
  io.grpc.MethodDescriptor<
          com.google.shopping.merchant.inventories.v1.ListRegionalInventoriesRequest,
          com.google.shopping.merchant.inventories.v1.ListRegionalInventoriesResponse>
      getListRegionalInventoriesMethod;
  if ((getListRegionalInventoriesMethod =
          RegionalInventoryServiceGrpc.getListRegionalInventoriesMethod)
      == null) {
    synchronized (RegionalInventoryServiceGrpc.class) {
      if ((getListRegionalInventoriesMethod =
              RegionalInventoryServiceGrpc.getListRegionalInventoriesMethod)
          == null) {
        RegionalInventoryServiceGrpc.getListRegionalInventoriesMethod =
            getListRegionalInventoriesMethod =
                io.grpc.MethodDescriptor
                    .<com.google.shopping.merchant.inventories.v1.ListRegionalInventoriesRequest,
                        com.google.shopping.merchant.inventories.v1
                            .ListRegionalInventoriesResponse>
                    newBuilder()
                    .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                    .setFullMethodName(
                        generateFullMethodName(SERVICE_NAME, "ListRegionalInventories"))
                    .setSampledToLocalTracing(true)
                    .setRequestMarshaller(
                        io.grpc.protobuf.ProtoUtils.marshaller(
                            com.google.shopping.merchant.inventories.v1
                                .ListRegionalInventoriesRequest.getDefaultInstance()))
                    .setResponseMarshaller(
                        io.grpc.protobuf.ProtoUtils.marshaller(
                            com.google.shopping.merchant.inventories.v1
                                .ListRegionalInventoriesResponse.getDefaultInstance()))
                    .setSchemaDescriptor(
                        new RegionalInventoryServiceMethodDescriptorSupplier(
                            "ListRegionalInventories"))
                    .build();
      }
    }
  }
  return getListRegionalInventoriesMethod;
}
// Lazily initialized (volatile field, double-checked) in the accessor below.
private static volatile io.grpc.MethodDescriptor<
        com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest,
        com.google.shopping.merchant.inventories.v1.RegionalInventory>
    getInsertRegionalInventoryMethod;

@io.grpc.stub.annotations.RpcMethod(
    fullMethodName = SERVICE_NAME + '/' + "InsertRegionalInventory",
    requestType =
        com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest.class,
    responseType = com.google.shopping.merchant.inventories.v1.RegionalInventory.class,
    methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
public static io.grpc.MethodDescriptor<
        com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest,
        com.google.shopping.merchant.inventories.v1.RegionalInventory>
    getInsertRegionalInventoryMethod() {
  // Lazily builds and caches the unary InsertRegionalInventory descriptor
  // using the same double-checked pattern as the other accessors.
  io.grpc.MethodDescriptor<
          com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest,
          com.google.shopping.merchant.inventories.v1.RegionalInventory>
      getInsertRegionalInventoryMethod;
  if ((getInsertRegionalInventoryMethod =
          RegionalInventoryServiceGrpc.getInsertRegionalInventoryMethod)
      == null) {
    synchronized (RegionalInventoryServiceGrpc.class) {
      if ((getInsertRegionalInventoryMethod =
              RegionalInventoryServiceGrpc.getInsertRegionalInventoryMethod)
          == null) {
        RegionalInventoryServiceGrpc.getInsertRegionalInventoryMethod =
            getInsertRegionalInventoryMethod =
                io.grpc.MethodDescriptor
                    .<com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest,
                        com.google.shopping.merchant.inventories.v1.RegionalInventory>
                    newBuilder()
                    .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                    .setFullMethodName(
                        generateFullMethodName(SERVICE_NAME, "InsertRegionalInventory"))
                    .setSampledToLocalTracing(true)
                    .setRequestMarshaller(
                        io.grpc.protobuf.ProtoUtils.marshaller(
                            com.google.shopping.merchant.inventories.v1
                                .InsertRegionalInventoryRequest.getDefaultInstance()))
                    .setResponseMarshaller(
                        io.grpc.protobuf.ProtoUtils.marshaller(
                            com.google.shopping.merchant.inventories.v1.RegionalInventory
                                .getDefaultInstance()))
                    .setSchemaDescriptor(
                        new RegionalInventoryServiceMethodDescriptorSupplier(
                            "InsertRegionalInventory"))
                    .build();
      }
    }
  }
  return getInsertRegionalInventoryMethod;
}
// Lazily initialized (volatile field, double-checked) in the accessor below.
private static volatile io.grpc.MethodDescriptor<
        com.google.shopping.merchant.inventories.v1.DeleteRegionalInventoryRequest,
        com.google.protobuf.Empty>
    getDeleteRegionalInventoryMethod;

@io.grpc.stub.annotations.RpcMethod(
    fullMethodName = SERVICE_NAME + '/' + "DeleteRegionalInventory",
    requestType =
        com.google.shopping.merchant.inventories.v1.DeleteRegionalInventoryRequest.class,
    responseType = com.google.protobuf.Empty.class,
    methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
public static io.grpc.MethodDescriptor<
        com.google.shopping.merchant.inventories.v1.DeleteRegionalInventoryRequest,
        com.google.protobuf.Empty>
    getDeleteRegionalInventoryMethod() {
  // Lazily builds and caches the unary DeleteRegionalInventory descriptor
  // using the same double-checked pattern as the other accessors; the
  // response is google.protobuf.Empty.
  io.grpc.MethodDescriptor<
          com.google.shopping.merchant.inventories.v1.DeleteRegionalInventoryRequest,
          com.google.protobuf.Empty>
      getDeleteRegionalInventoryMethod;
  if ((getDeleteRegionalInventoryMethod =
          RegionalInventoryServiceGrpc.getDeleteRegionalInventoryMethod)
      == null) {
    synchronized (RegionalInventoryServiceGrpc.class) {
      if ((getDeleteRegionalInventoryMethod =
              RegionalInventoryServiceGrpc.getDeleteRegionalInventoryMethod)
          == null) {
        RegionalInventoryServiceGrpc.getDeleteRegionalInventoryMethod =
            getDeleteRegionalInventoryMethod =
                io.grpc.MethodDescriptor
                    .<com.google.shopping.merchant.inventories.v1.DeleteRegionalInventoryRequest,
                        com.google.protobuf.Empty>
                    newBuilder()
                    .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                    .setFullMethodName(
                        generateFullMethodName(SERVICE_NAME, "DeleteRegionalInventory"))
                    .setSampledToLocalTracing(true)
                    .setRequestMarshaller(
                        io.grpc.protobuf.ProtoUtils.marshaller(
                            com.google.shopping.merchant.inventories.v1
                                .DeleteRegionalInventoryRequest.getDefaultInstance()))
                    .setResponseMarshaller(
                        io.grpc.protobuf.ProtoUtils.marshaller(
                            com.google.protobuf.Empty.getDefaultInstance()))
                    .setSchemaDescriptor(
                        new RegionalInventoryServiceMethodDescriptorSupplier(
                            "DeleteRegionalInventory"))
                    .build();
      }
    }
  }
  return getDeleteRegionalInventoryMethod;
}
/** Creates a new async stub that supports all call types for the service */
public static RegionalInventoryServiceStub newStub(io.grpc.Channel channel) {
  // Factory object lets AbstractStub rebuild the stub with fresh CallOptions.
  io.grpc.stub.AbstractStub.StubFactory<RegionalInventoryServiceStub> factory =
      new io.grpc.stub.AbstractStub.StubFactory<RegionalInventoryServiceStub>() {
        @java.lang.Override
        public RegionalInventoryServiceStub newStub(
            io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
          return new RegionalInventoryServiceStub(channel, callOptions);
        }
      };
  return RegionalInventoryServiceStub.newStub(factory, channel);
}
/** Creates a new blocking-style stub that supports all types of calls on the service */
public static RegionalInventoryServiceBlockingV2Stub newBlockingV2Stub(io.grpc.Channel channel) {
  // Factory object lets AbstractStub rebuild the stub with fresh CallOptions.
  io.grpc.stub.AbstractStub.StubFactory<RegionalInventoryServiceBlockingV2Stub> factory =
      new io.grpc.stub.AbstractStub.StubFactory<RegionalInventoryServiceBlockingV2Stub>() {
        @java.lang.Override
        public RegionalInventoryServiceBlockingV2Stub newStub(
            io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
          return new RegionalInventoryServiceBlockingV2Stub(channel, callOptions);
        }
      };
  return RegionalInventoryServiceBlockingV2Stub.newStub(factory, channel);
}
/**
 * Creates a new blocking-style stub that supports unary and streaming output calls on the service
 */
public static RegionalInventoryServiceBlockingStub newBlockingStub(io.grpc.Channel channel) {
  // Factory object lets AbstractStub rebuild the stub with fresh CallOptions.
  io.grpc.stub.AbstractStub.StubFactory<RegionalInventoryServiceBlockingStub> factory =
      new io.grpc.stub.AbstractStub.StubFactory<RegionalInventoryServiceBlockingStub>() {
        @java.lang.Override
        public RegionalInventoryServiceBlockingStub newStub(
            io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
          return new RegionalInventoryServiceBlockingStub(channel, callOptions);
        }
      };
  return RegionalInventoryServiceBlockingStub.newStub(factory, channel);
}
/** Creates a new ListenableFuture-style stub that supports unary calls on the service */
public static RegionalInventoryServiceFutureStub newFutureStub(io.grpc.Channel channel) {
  // Factory object lets AbstractStub rebuild the stub with fresh CallOptions.
  io.grpc.stub.AbstractStub.StubFactory<RegionalInventoryServiceFutureStub> factory =
      new io.grpc.stub.AbstractStub.StubFactory<RegionalInventoryServiceFutureStub>() {
        @java.lang.Override
        public RegionalInventoryServiceFutureStub newStub(
            io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
          return new RegionalInventoryServiceFutureStub(channel, callOptions);
        }
      };
  return RegionalInventoryServiceFutureStub.newStub(factory, channel);
}
/**
 *
 *
 * <pre>
 * Service to manage regional inventory for products. There is also separate
 * `regions` resource and API to manage regions definitions.
 * </pre>
 */
public interface AsyncService {
  // Each default method replies with UNIMPLEMENTED until a server overrides it.
  /**
   *
   *
   * <pre>
   * Lists the `RegionalInventory` resources for the given product in your
   * merchant account. The response might contain fewer items than specified by
   * `pageSize`. If `pageToken` was returned in previous request, it can be
   * used to obtain additional results.
   * `RegionalInventory` resources are listed per product for a given account.
   * </pre>
   */
  default void listRegionalInventories(
      com.google.shopping.merchant.inventories.v1.ListRegionalInventoriesRequest request,
      io.grpc.stub.StreamObserver<
              com.google.shopping.merchant.inventories.v1.ListRegionalInventoriesResponse>
          responseObserver) {
    io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
        getListRegionalInventoriesMethod(), responseObserver);
  }

  /**
   *
   *
   * <pre>
   * Inserts a `RegionalInventory` to a given product in your
   * merchant account.
   * Replaces the full `RegionalInventory` resource if an entry with the same
   * [`region`][google.shopping.merchant.inventories.v1.RegionalInventory.region]
   * already exists for the product.
   * It might take up to 30 minutes for the new or updated `RegionalInventory`
   * resource to appear in products.
   * </pre>
   */
  default void insertRegionalInventory(
      com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest request,
      io.grpc.stub.StreamObserver<com.google.shopping.merchant.inventories.v1.RegionalInventory>
          responseObserver) {
    io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
        getInsertRegionalInventoryMethod(), responseObserver);
  }

  /**
   *
   *
   * <pre>
   * Deletes the specified `RegionalInventory` resource from the given product
   * in your merchant account. It might take up to an hour for the
   * `RegionalInventory` to be deleted from the specific product.
   * Once you have received a successful delete response, wait for that
   * period before attempting a delete again.
   * </pre>
   */
  default void deleteRegionalInventory(
      com.google.shopping.merchant.inventories.v1.DeleteRegionalInventoryRequest request,
      io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) {
    io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
        getDeleteRegionalInventoryMethod(), responseObserver);
  }
}
/**
 * Base class for the server implementation of the service RegionalInventoryService.
 *
 * <pre>
 * Service to manage regional inventory for products. There is also separate
 * `regions` resource and API to manage regions definitions.
 * </pre>
 */
public abstract static class RegionalInventoryServiceImplBase
    implements io.grpc.BindableService, AsyncService {
  @java.lang.Override
  public final io.grpc.ServerServiceDefinition bindService() {
    // Delegates to the static bindService(...) helper so the handler wiring
    // lives in one place.
    return RegionalInventoryServiceGrpc.bindService(this);
  }
}
/**
 * A stub to allow clients to do asynchronous rpc calls to service RegionalInventoryService.
 *
 * <pre>
 * Service to manage regional inventory for products. There is also separate
 * `regions` resource and API to manage regions definitions.
 * </pre>
 */
public static final class RegionalInventoryServiceStub
    extends io.grpc.stub.AbstractAsyncStub<RegionalInventoryServiceStub> {
  private RegionalInventoryServiceStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
    super(channel, callOptions);
  }

  @java.lang.Override
  protected RegionalInventoryServiceStub build(
      io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
    // Required by AbstractStub so derived stubs (with new options) can be built.
    return new RegionalInventoryServiceStub(channel, callOptions);
  }

  /**
   *
   *
   * <pre>
   * Lists the `RegionalInventory` resources for the given product in your
   * merchant account. The response might contain fewer items than specified by
   * `pageSize`. If `pageToken` was returned in previous request, it can be
   * used to obtain additional results.
   * `RegionalInventory` resources are listed per product for a given account.
   * </pre>
   */
  public void listRegionalInventories(
      com.google.shopping.merchant.inventories.v1.ListRegionalInventoriesRequest request,
      io.grpc.stub.StreamObserver<
              com.google.shopping.merchant.inventories.v1.ListRegionalInventoriesResponse>
          responseObserver) {
    io.grpc.stub.ClientCalls.asyncUnaryCall(
        getChannel().newCall(getListRegionalInventoriesMethod(), getCallOptions()),
        request,
        responseObserver);
  }

  /**
   *
   *
   * <pre>
   * Inserts a `RegionalInventory` to a given product in your
   * merchant account.
   * Replaces the full `RegionalInventory` resource if an entry with the same
   * [`region`][google.shopping.merchant.inventories.v1.RegionalInventory.region]
   * already exists for the product.
   * It might take up to 30 minutes for the new or updated `RegionalInventory`
   * resource to appear in products.
   * </pre>
   */
  public void insertRegionalInventory(
      com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest request,
      io.grpc.stub.StreamObserver<com.google.shopping.merchant.inventories.v1.RegionalInventory>
          responseObserver) {
    io.grpc.stub.ClientCalls.asyncUnaryCall(
        getChannel().newCall(getInsertRegionalInventoryMethod(), getCallOptions()),
        request,
        responseObserver);
  }

  /**
   *
   *
   * <pre>
   * Deletes the specified `RegionalInventory` resource from the given product
   * in your merchant account. It might take up to an hour for the
   * `RegionalInventory` to be deleted from the specific product.
   * Once you have received a successful delete response, wait for that
   * period before attempting a delete again.
   * </pre>
   */
  public void deleteRegionalInventory(
      com.google.shopping.merchant.inventories.v1.DeleteRegionalInventoryRequest request,
      io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) {
    io.grpc.stub.ClientCalls.asyncUnaryCall(
        getChannel().newCall(getDeleteRegionalInventoryMethod(), getCallOptions()),
        request,
        responseObserver);
  }
}
/**
 * A stub to allow clients to do synchronous rpc calls to service RegionalInventoryService.
 *
 * <pre>
 * Service to manage regional inventory for products. There is also separate
 * `regions` resource and API to manage regions definitions.
 * </pre>
 */
public static final class RegionalInventoryServiceBlockingV2Stub
    extends io.grpc.stub.AbstractBlockingStub<RegionalInventoryServiceBlockingV2Stub> {
  private RegionalInventoryServiceBlockingV2Stub(
      io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
    super(channel, callOptions);
  }

  @java.lang.Override
  protected RegionalInventoryServiceBlockingV2Stub build(
      io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
    // Required by AbstractStub so derived stubs (with new options) can be built.
    return new RegionalInventoryServiceBlockingV2Stub(channel, callOptions);
  }

  /**
   *
   *
   * <pre>
   * Lists the `RegionalInventory` resources for the given product in your
   * merchant account. The response might contain fewer items than specified by
   * `pageSize`. If `pageToken` was returned in previous request, it can be
   * used to obtain additional results.
   * `RegionalInventory` resources are listed per product for a given account.
   * </pre>
   */
  public com.google.shopping.merchant.inventories.v1.ListRegionalInventoriesResponse
      listRegionalInventories(
          com.google.shopping.merchant.inventories.v1.ListRegionalInventoriesRequest request) {
    return io.grpc.stub.ClientCalls.blockingUnaryCall(
        getChannel(), getListRegionalInventoriesMethod(), getCallOptions(), request);
  }

  /**
   *
   *
   * <pre>
   * Inserts a `RegionalInventory` to a given product in your
   * merchant account.
   * Replaces the full `RegionalInventory` resource if an entry with the same
   * [`region`][google.shopping.merchant.inventories.v1.RegionalInventory.region]
   * already exists for the product.
   * It might take up to 30 minutes for the new or updated `RegionalInventory`
   * resource to appear in products.
   * </pre>
   */
  public com.google.shopping.merchant.inventories.v1.RegionalInventory insertRegionalInventory(
      com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest request) {
    return io.grpc.stub.ClientCalls.blockingUnaryCall(
        getChannel(), getInsertRegionalInventoryMethod(), getCallOptions(), request);
  }

  /**
   *
   *
   * <pre>
   * Deletes the specified `RegionalInventory` resource from the given product
   * in your merchant account. It might take up to an hour for the
   * `RegionalInventory` to be deleted from the specific product.
   * Once you have received a successful delete response, wait for that
   * period before attempting a delete again.
   * </pre>
   */
  public com.google.protobuf.Empty deleteRegionalInventory(
      com.google.shopping.merchant.inventories.v1.DeleteRegionalInventoryRequest request) {
    return io.grpc.stub.ClientCalls.blockingUnaryCall(
        getChannel(), getDeleteRegionalInventoryMethod(), getCallOptions(), request);
  }
}
/**
 * A stub to allow clients to do limited synchronous rpc calls to service
 * RegionalInventoryService.
 *
 * <pre>
 * Service to manage regional inventory for products. There is also separate
 * `regions` resource and API to manage regions definitions.
 * </pre>
 */
public static final class RegionalInventoryServiceBlockingStub
    extends io.grpc.stub.AbstractBlockingStub<RegionalInventoryServiceBlockingStub> {
  private RegionalInventoryServiceBlockingStub(
      io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
    super(channel, callOptions);
  }

  @java.lang.Override
  protected RegionalInventoryServiceBlockingStub build(
      io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
    // Required by AbstractStub so derived stubs (with new options) can be built.
    return new RegionalInventoryServiceBlockingStub(channel, callOptions);
  }

  /**
   *
   *
   * <pre>
   * Lists the `RegionalInventory` resources for the given product in your
   * merchant account. The response might contain fewer items than specified by
   * `pageSize`. If `pageToken` was returned in previous request, it can be
   * used to obtain additional results.
   * `RegionalInventory` resources are listed per product for a given account.
   * </pre>
   */
  public com.google.shopping.merchant.inventories.v1.ListRegionalInventoriesResponse
      listRegionalInventories(
          com.google.shopping.merchant.inventories.v1.ListRegionalInventoriesRequest request) {
    return io.grpc.stub.ClientCalls.blockingUnaryCall(
        getChannel(), getListRegionalInventoriesMethod(), getCallOptions(), request);
  }

  /**
   *
   *
   * <pre>
   * Inserts a `RegionalInventory` to a given product in your
   * merchant account.
   * Replaces the full `RegionalInventory` resource if an entry with the same
   * [`region`][google.shopping.merchant.inventories.v1.RegionalInventory.region]
   * already exists for the product.
   * It might take up to 30 minutes for the new or updated `RegionalInventory`
   * resource to appear in products.
   * </pre>
   */
  public com.google.shopping.merchant.inventories.v1.RegionalInventory insertRegionalInventory(
      com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest request) {
    return io.grpc.stub.ClientCalls.blockingUnaryCall(
        getChannel(), getInsertRegionalInventoryMethod(), getCallOptions(), request);
  }

  /**
   *
   *
   * <pre>
   * Deletes the specified `RegionalInventory` resource from the given product
   * in your merchant account. It might take up to an hour for the
   * `RegionalInventory` to be deleted from the specific product.
   * Once you have received a successful delete response, wait for that
   * period before attempting a delete again.
   * </pre>
   */
  public com.google.protobuf.Empty deleteRegionalInventory(
      com.google.shopping.merchant.inventories.v1.DeleteRegionalInventoryRequest request) {
    return io.grpc.stub.ClientCalls.blockingUnaryCall(
        getChannel(), getDeleteRegionalInventoryMethod(), getCallOptions(), request);
  }
}
/**
 * A stub to allow clients to do ListenableFuture-style rpc calls to service
 * RegionalInventoryService.
 *
 * <pre>
 * Service to manage regional inventory for products. There is also separate
 * `regions` resource and API to manage regions definitions.
 * </pre>
 */
public static final class RegionalInventoryServiceFutureStub
    extends io.grpc.stub.AbstractFutureStub<RegionalInventoryServiceFutureStub> {
  private RegionalInventoryServiceFutureStub(
      io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
    super(channel, callOptions);
  }

  @java.lang.Override
  protected RegionalInventoryServiceFutureStub build(
      io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
    // Required by AbstractStub so derived stubs (with new options) can be built.
    return new RegionalInventoryServiceFutureStub(channel, callOptions);
  }

  /**
   *
   *
   * <pre>
   * Lists the `RegionalInventory` resources for the given product in your
   * merchant account. The response might contain fewer items than specified by
   * `pageSize`. If `pageToken` was returned in previous request, it can be
   * used to obtain additional results.
   * `RegionalInventory` resources are listed per product for a given account.
   * </pre>
   */
  public com.google.common.util.concurrent.ListenableFuture<
          com.google.shopping.merchant.inventories.v1.ListRegionalInventoriesResponse>
      listRegionalInventories(
          com.google.shopping.merchant.inventories.v1.ListRegionalInventoriesRequest request) {
    return io.grpc.stub.ClientCalls.futureUnaryCall(
        getChannel().newCall(getListRegionalInventoriesMethod(), getCallOptions()), request);
  }

  /**
   *
   *
   * <pre>
   * Inserts a `RegionalInventory` to a given product in your
   * merchant account.
   * Replaces the full `RegionalInventory` resource if an entry with the same
   * [`region`][google.shopping.merchant.inventories.v1.RegionalInventory.region]
   * already exists for the product.
   * It might take up to 30 minutes for the new or updated `RegionalInventory`
   * resource to appear in products.
   * </pre>
   */
  public com.google.common.util.concurrent.ListenableFuture<
          com.google.shopping.merchant.inventories.v1.RegionalInventory>
      insertRegionalInventory(
          com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest request) {
    return io.grpc.stub.ClientCalls.futureUnaryCall(
        getChannel().newCall(getInsertRegionalInventoryMethod(), getCallOptions()), request);
  }

  /**
   *
   *
   * <pre>
   * Deletes the specified `RegionalInventory` resource from the given product
   * in your merchant account. It might take up to an hour for the
   * `RegionalInventory` to be deleted from the specific product.
   * Once you have received a successful delete response, wait for that
   * period before attempting a delete again.
   * </pre>
   */
  public com.google.common.util.concurrent.ListenableFuture<com.google.protobuf.Empty>
      deleteRegionalInventory(
          com.google.shopping.merchant.inventories.v1.DeleteRegionalInventoryRequest request) {
    return io.grpc.stub.ClientCalls.futureUnaryCall(
        getChannel().newCall(getDeleteRegionalInventoryMethod(), getCallOptions()), request);
  }
}
private static final int METHODID_LIST_REGIONAL_INVENTORIES = 0;
private static final int METHODID_INSERT_REGIONAL_INVENTORY = 1;
private static final int METHODID_DELETE_REGIONAL_INVENTORY = 2;
private static final class MethodHandlers<Req, Resp>
implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
private final AsyncService serviceImpl;
private final int methodId;
MethodHandlers(AsyncService serviceImpl, int methodId) {
this.serviceImpl = serviceImpl;
this.methodId = methodId;
}
@java.lang.Override
@java.lang.SuppressWarnings("unchecked")
public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
switch (methodId) {
case METHODID_LIST_REGIONAL_INVENTORIES:
serviceImpl.listRegionalInventories(
(com.google.shopping.merchant.inventories.v1.ListRegionalInventoriesRequest) request,
(io.grpc.stub.StreamObserver<
com.google.shopping.merchant.inventories.v1.ListRegionalInventoriesResponse>)
responseObserver);
break;
case METHODID_INSERT_REGIONAL_INVENTORY:
serviceImpl.insertRegionalInventory(
(com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest) request,
(io.grpc.stub.StreamObserver<
com.google.shopping.merchant.inventories.v1.RegionalInventory>)
responseObserver);
break;
case METHODID_DELETE_REGIONAL_INVENTORY:
serviceImpl.deleteRegionalInventory(
(com.google.shopping.merchant.inventories.v1.DeleteRegionalInventoryRequest) request,
(io.grpc.stub.StreamObserver<com.google.protobuf.Empty>) responseObserver);
break;
default:
throw new AssertionError();
}
}
@java.lang.Override
@java.lang.SuppressWarnings("unchecked")
public io.grpc.stub.StreamObserver<Req> invoke(
io.grpc.stub.StreamObserver<Resp> responseObserver) {
switch (methodId) {
default:
throw new AssertionError();
}
}
}
public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) {
return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
.addMethod(
getListRegionalInventoriesMethod(),
io.grpc.stub.ServerCalls.asyncUnaryCall(
new MethodHandlers<
com.google.shopping.merchant.inventories.v1.ListRegionalInventoriesRequest,
com.google.shopping.merchant.inventories.v1.ListRegionalInventoriesResponse>(
service, METHODID_LIST_REGIONAL_INVENTORIES)))
.addMethod(
getInsertRegionalInventoryMethod(),
io.grpc.stub.ServerCalls.asyncUnaryCall(
new MethodHandlers<
com.google.shopping.merchant.inventories.v1.InsertRegionalInventoryRequest,
com.google.shopping.merchant.inventories.v1.RegionalInventory>(
service, METHODID_INSERT_REGIONAL_INVENTORY)))
.addMethod(
getDeleteRegionalInventoryMethod(),
io.grpc.stub.ServerCalls.asyncUnaryCall(
new MethodHandlers<
com.google.shopping.merchant.inventories.v1.DeleteRegionalInventoryRequest,
com.google.protobuf.Empty>(service, METHODID_DELETE_REGIONAL_INVENTORY)))
.build();
}
private abstract static class RegionalInventoryServiceBaseDescriptorSupplier
implements io.grpc.protobuf.ProtoFileDescriptorSupplier,
io.grpc.protobuf.ProtoServiceDescriptorSupplier {
RegionalInventoryServiceBaseDescriptorSupplier() {}
@java.lang.Override
public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
return com.google.shopping.merchant.inventories.v1.RegionalInventoryProto.getDescriptor();
}
@java.lang.Override
public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {
return getFileDescriptor().findServiceByName("RegionalInventoryService");
}
}
private static final class RegionalInventoryServiceFileDescriptorSupplier
extends RegionalInventoryServiceBaseDescriptorSupplier {
RegionalInventoryServiceFileDescriptorSupplier() {}
}
private static final class RegionalInventoryServiceMethodDescriptorSupplier
extends RegionalInventoryServiceBaseDescriptorSupplier
implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
private final java.lang.String methodName;
RegionalInventoryServiceMethodDescriptorSupplier(java.lang.String methodName) {
this.methodName = methodName;
}
@java.lang.Override
public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
return getServiceDescriptor().findMethodByName(methodName);
}
}
private static volatile io.grpc.ServiceDescriptor serviceDescriptor;
public static io.grpc.ServiceDescriptor getServiceDescriptor() {
io.grpc.ServiceDescriptor result = serviceDescriptor;
if (result == null) {
synchronized (RegionalInventoryServiceGrpc.class) {
result = serviceDescriptor;
if (result == null) {
serviceDescriptor =
result =
io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
.setSchemaDescriptor(new RegionalInventoryServiceFileDescriptorSupplier())
.addMethod(getListRegionalInventoriesMethod())
.addMethod(getInsertRegionalInventoryMethod())
.addMethod(getDeleteRegionalInventoryMethod())
.build();
}
}
}
return result;
}
}
|
oracle/graal | 35,128 | wasm/src/org.graalvm.wasm.test/src/org/graalvm/wasm/test/suites/bytecode/BytecodeSuite.java | /*
* Copyright (c) 2022, 2025, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* The Universal Permissive License (UPL), Version 1.0
*
* Subject to the condition set forth below, permission is hereby granted to any
* person obtaining a copy of this software, associated documentation and/or
* data (collectively the "Software"), free of charge and under any and all
* copyright rights in the Software, and any and all patent rights owned or
* freely licensable by each licensor hereunder covering either (i) the
* unmodified Software as contributed to or provided by such licensor, or (ii)
* the Larger Works (as defined below), to deal in both
*
* (a) the Software, and
*
* (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if
* one is included with the Software each a "Larger Work" to which the Software
* is contributed by such licensors),
*
* without restriction, including without limitation the rights to copy, create
* derivative works of, display, perform, and distribute the Software and make,
* use, sell, offer for sale, import, export, have made, and have sold the
* Software and the Larger Work(s), and to sublicense the foregoing rights on
* either these or other terms.
*
* This license is subject to the following condition:
*
* The above copyright notice and either this complete permission notice or at a
* minimum a reference to the UPL must be included in all copies or substantial
* portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package org.graalvm.wasm.test.suites.bytecode;
import java.util.function.Consumer;
import org.graalvm.wasm.WasmType;
import org.graalvm.wasm.constants.Bytecode;
import org.graalvm.wasm.constants.ExceptionHandlerType;
import org.graalvm.wasm.constants.SegmentMode;
import org.graalvm.wasm.parser.bytecode.RuntimeBytecodeGen;
import org.junit.Assert;
import org.junit.Test;
/**
* Tests the correctness of the bytecode produced by the {@link RuntimeBytecodeGen}.
*/
public class BytecodeSuite {
private static void test(Consumer<RuntimeBytecodeGen> b, byte[] expected) {
RuntimeBytecodeGen bytecodeGen = new RuntimeBytecodeGen();
b.accept(bytecodeGen);
Assert.assertArrayEquals(expected, bytecodeGen.toArray());
}
private static void testAssertion(Consumer<RuntimeBytecodeGen> b, String errorMessage) {
RuntimeBytecodeGen bytecodeGen = new RuntimeBytecodeGen();
try {
b.accept(bytecodeGen);
Assert.fail("Should have thrown assertion error");
} catch (AssertionError e) {
Assert.assertTrue("Invalid assertion message: " + e.getMessage(), e.getMessage().contains(errorMessage));
}
}
@Test
public void testEmptyLabel() {
test(b -> b.addLabel(0, 0, 0), new byte[]{Bytecode.SKIP_LABEL_U8, Bytecode.LABEL_U8, 0x00});
}
// Test common result type encoding
@Test
public void testLabelU8ResultNum() {
test(b -> b.addLabel(1, 0, WasmType.NUM_COMMON_TYPE), new byte[]{Bytecode.SKIP_LABEL_U8, Bytecode.LABEL_U8, (byte) 0x80});
}
@Test
public void testLabelU8ResultRef() {
test(b -> b.addLabel(1, 0, WasmType.OBJ_COMMON_TYPE), new byte[]{Bytecode.SKIP_LABEL_U8, Bytecode.LABEL_U8, (byte) 0xC0});
}
@Test
public void testLabelU8ResultMix() {
testAssertion(b -> b.addLabel(1, 0, WasmType.MIX_COMMON_TYPE), "Single result value must either have number or reference type.");
}
@Test
public void testLabelU16ResultNum() {
test(b -> b.addLabel(2, 0, WasmType.NUM_COMMON_TYPE), new byte[]{Bytecode.SKIP_LABEL_U16, Bytecode.LABEL_U16, 0x42, 0x00});
}
@Test
public void testLabelU16ResultRef() {
test(b -> b.addLabel(2, 0, WasmType.OBJ_COMMON_TYPE), new byte[]{Bytecode.SKIP_LABEL_U16, Bytecode.LABEL_U16, (byte) 0x82, 0x00});
}
@Test
public void testLabelU16ResultMix() {
test(b -> b.addLabel(2, 0, WasmType.MIX_COMMON_TYPE), new byte[]{Bytecode.SKIP_LABEL_U16, Bytecode.LABEL_U16, (byte) 0xC2, 0x00});
}
@Test
public void testLabelI32ResultNum() {
test(b -> b.addLabel(64, 0, WasmType.NUM_COMMON_TYPE), new byte[]{Bytecode.SKIP_LABEL_I32, Bytecode.LABEL_I32, 0x01, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00});
}
@Test
public void testLabelI32ResultRef() {
test(b -> b.addLabel(64, 0, WasmType.OBJ_COMMON_TYPE), new byte[]{Bytecode.SKIP_LABEL_I32, Bytecode.LABEL_I32, 0x02, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00});
}
@Test
public void testLabelI32ResultMix() {
test(b -> b.addLabel(64, 0, WasmType.MIX_COMMON_TYPE), new byte[]{Bytecode.SKIP_LABEL_I32, Bytecode.LABEL_I32, 0x03, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00});
}
@Test
public void testLabelU8MaxStackSize() {
test(b -> b.addLabel(0, 63, 0), new byte[]{Bytecode.SKIP_LABEL_U8, Bytecode.LABEL_U8, 0x3F});
}
@Test
public void testLabelU16MinStackSize() {
test(b -> b.addLabel(0, 64, 0), new byte[]{Bytecode.SKIP_LABEL_U16, Bytecode.LABEL_U16, 0x00, 0x40});
}
@Test
public void testLabelU16MaxResults() {
test(b -> b.addLabel(63, 0, WasmType.NUM_COMMON_TYPE), new byte[]{Bytecode.SKIP_LABEL_U16, Bytecode.LABEL_U16, 0x7F, 0x00});
}
@Test
public void testLabelU16MaxStackSize() {
test(b -> b.addLabel(0, 255, 0), new byte[]{Bytecode.SKIP_LABEL_U16, Bytecode.LABEL_U16, 0x00, (byte) 0xFF});
}
@Test
public void testLabelI32MinStackSize() {
test(b -> b.addLabel(0, 256, 0), new byte[]{Bytecode.SKIP_LABEL_I32, Bytecode.LABEL_I32, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00});
}
@Test
public void testAddLoopLabel() {
test(b -> b.addLoopLabel(1, 20, WasmType.NUM_COMMON_TYPE), new byte[]{Bytecode.SKIP_LABEL_U8, Bytecode.LABEL_U8, (byte) 0x94, Bytecode.LOOP});
}
@Test
public void testInvalidResultType() {
testAssertion(b -> b.addLabel(1, 1, 5), "invalid result type");
}
@Test
public void testBrU8Min() {
test(b -> b.addBranch(1), new byte[]{Bytecode.BR_U8, 0x00});
}
@Test
public void testBrU8Max() {
final byte[] expected = new byte[256];
expected[254] = Bytecode.BR_U8;
expected[255] = (byte) 0xFF;
test(b -> {
for (int i = 0; i < 254; i++) {
b.addOp(0);
}
b.addBranch(0);
}, expected);
}
@Test
public void testBrI32MinForward() {
test(b -> b.addBranch(2), new byte[]{Bytecode.BR_I32, 0x01, 0x00, 0x00, 0x00});
}
@Test
public void testBrI32MaxForward() {
test(b -> b.addBranch(2147483647), new byte[]{Bytecode.BR_I32, (byte) 0xFE, (byte) 0xFF, (byte) 0xFF, 0x7F});
}
@Test
public void testBrI32MinBackward() {
final byte[] expected = new byte[260];
expected[255] = Bytecode.BR_I32;
expected[256] = 0x00;
expected[257] = (byte) 0xFF;
expected[258] = (byte) 0xFF;
expected[259] = (byte) 0xFF;
test(b -> {
for (int i = 0; i < 255; i++) {
b.addOp(0);
}
b.addBranch(0);
}, expected);
}
@Test
public void testBrIfU8Min() {
test(b -> b.addBranchIf(1), new byte[]{Bytecode.BR_IF_U8, 0x00, 0x00, 0x00});
}
@Test
public void testBrIfU8Max() {
final byte[] expected = new byte[258];
expected[254] = Bytecode.BR_IF_U8;
expected[255] = (byte) 0xFF;
test(b -> {
for (int i = 0; i < 254; i++) {
b.addOp(0);
}
b.addBranchIf(0);
}, expected);
}
@Test
public void testBrIfI32MinForward() {
test(b -> b.addBranchIf(2), new byte[]{Bytecode.BR_IF_I32, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00});
}
@Test
public void testBrIfI32MaxForward() {
test(b -> b.addBranchIf(2147483647), new byte[]{Bytecode.BR_IF_I32, (byte) 0xFE, (byte) 0xFF, (byte) 0xFF, 0x7F, 0x00, 0x00});
}
@Test
public void testBrIfI32MinBackward() {
final byte[] expected = new byte[262];
expected[255] = Bytecode.BR_IF_I32;
expected[256] = 0x00;
expected[257] = (byte) 0xFF;
expected[258] = (byte) 0xFF;
expected[259] = (byte) 0xFF;
test(b -> {
for (int i = 0; i < 255; i++) {
b.addOp(0);
}
b.addBranchIf(0);
}, expected);
}
@Test
public void testBrTableU8Min() {
test(b -> b.addBranchTable(1), new byte[]{Bytecode.BR_TABLE_U8, 0x01, 0x00, 0x00});
}
@Test
public void testBrTableU8Max() {
test(b -> b.addBranchTable(255), new byte[]{Bytecode.BR_TABLE_U8, (byte) 0xFF, 0x00, 0x00});
}
@Test
public void testBrTableI32Min() {
test(b -> b.addBranchTable(256), new byte[]{Bytecode.BR_TABLE_I32, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00});
}
@Test
public void testCallU8Min() {
test(b -> b.addCall(0, 0), new byte[]{Bytecode.CALL_U8, 0x00, 0x00});
}
@Test
public void testCallU8MaxNodeIndex() {
test(b -> b.addCall(255, 0), new byte[]{Bytecode.CALL_U8, (byte) 0xFF, 0x00});
}
@Test
public void testCallU8MaxFunctionIndex() {
test(b -> b.addCall(0, 255), new byte[]{Bytecode.CALL_U8, 0x00, (byte) 0xFF});
}
@Test
public void testCallI32MinNodeIndex() {
test(b -> b.addCall(256, 0), new byte[]{Bytecode.CALL_I32, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00});
}
@Test
public void testCallI32MinFunctionIndex() {
test(b -> b.addCall(0, 256), new byte[]{Bytecode.CALL_I32, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00});
}
@Test
public void testCallIndirectU8Min() {
test(b -> b.addIndirectCall(0, 0, 0), new byte[]{Bytecode.CALL_INDIRECT_U8, 0x00, 0x00, 0x00});
}
@Test
public void testCallIndirectU8MaxNodeIndex() {
test(b -> b.addIndirectCall(255, 0, 0), new byte[]{Bytecode.CALL_INDIRECT_U8, (byte) 0xFF, 0x00, 0x00});
}
@Test
public void testCallIndirectU8MaxTypeIndex() {
test(b -> b.addIndirectCall(0, 255, 0), new byte[]{Bytecode.CALL_INDIRECT_U8, 0x00, (byte) 0xFF, 0x00});
}
@Test
public void testCallIndirectU8MaxTableIndex() {
test(b -> b.addIndirectCall(0, 0, 255), new byte[]{Bytecode.CALL_INDIRECT_U8, 0x00, 0x00, (byte) 0xFF});
}
@Test
public void testCallIndirectI32MinNodeIndex() {
test(b -> b.addIndirectCall(256, 0, 0), new byte[]{Bytecode.CALL_INDIRECT_I32, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00});
}
@Test
public void testCallIndirectI32MinTypeIndex() {
test(b -> b.addIndirectCall(0, 256, 0), new byte[]{Bytecode.CALL_INDIRECT_I32, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00});
}
@Test
public void testCallIndirectI32MinTableIndex() {
test(b -> b.addIndirectCall(0, 0, 256), new byte[]{Bytecode.CALL_INDIRECT_I32, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00});
}
@Test
public void testI32SignedI8Min() {
test(b -> b.addSigned(Bytecode.I32_CONST_I8, Bytecode.I32_CONST_I32, -128), new byte[]{Bytecode.I32_CONST_I8, (byte) 0x80});
}
@Test
public void testI32SignedI8Max() {
test(b -> b.addSigned(Bytecode.I32_CONST_I8, Bytecode.I32_CONST_I32, 127), new byte[]{Bytecode.I32_CONST_I8, 0x7F});
}
@Test
public void testI32SignedI32MinNegative() {
test(b -> b.addSigned(Bytecode.I32_CONST_I8, Bytecode.I32_CONST_I32, -129), new byte[]{Bytecode.I32_CONST_I32, (byte) 0x7F, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF});
}
@Test
public void testI32SignedI32MinPositive() {
test(b -> b.addSigned(Bytecode.I32_CONST_I8, Bytecode.I32_CONST_I32, 128), new byte[]{Bytecode.I32_CONST_I32, (byte) 0x80, 0x00, 0x00, 0x00});
}
@Test
public void testI64SignedU8Min() {
test(b -> b.addSigned(Bytecode.I64_CONST_I8, Bytecode.I64_CONST_I64, -128L), new byte[]{Bytecode.I64_CONST_I8, (byte) 0x80});
}
@Test
public void testI64SignedU8Max() {
test(b -> b.addSigned(Bytecode.I64_CONST_I8, Bytecode.I64_CONST_I64, 127L), new byte[]{Bytecode.I64_CONST_I8, 0x7F});
}
@Test
public void testI64SignedI64MinNegative() {
test(b -> b.addSigned(Bytecode.I64_CONST_I8, Bytecode.I64_CONST_I64, -129L),
new byte[]{Bytecode.I64_CONST_I64, (byte) 0x7F, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF});
}
@Test
public void testI64SignedI64MinPositive() {
test(b -> b.addSigned(Bytecode.I64_CONST_I8, Bytecode.I64_CONST_I64, 128L), new byte[]{Bytecode.I64_CONST_I64, (byte) 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00});
}
@Test
public void testUnsignedU8Min() {
test(b -> b.addUnsigned(Bytecode.LOCAL_GET_U8, Bytecode.LOCAL_GET_I32, 0), new byte[]{Bytecode.LOCAL_GET_U8, 0x00});
}
@Test
public void testUnsignedU8Max() {
test(b -> b.addUnsigned(Bytecode.LOCAL_GET_U8, Bytecode.LOCAL_GET_I32, 255), new byte[]{Bytecode.LOCAL_GET_U8, (byte) 0xFF});
}
@Test
public void testUnsignedI32Min() {
test(b -> b.addUnsigned(Bytecode.LOCAL_GET_U8, Bytecode.LOCAL_GET_I32, 256), new byte[]{Bytecode.LOCAL_GET_I32, 0x00, 0x01, 0x00, 0x00});
}
@Test
public void testMemoryInstructionU8Min() {
test(b -> b.addMemoryInstruction(Bytecode.I32_LOAD, Bytecode.I32_LOAD_U8, Bytecode.I32_LOAD_I32, 0, 0, false), new byte[]{Bytecode.I32_LOAD_U8, 0x00});
}
@Test
public void testMemoryInstructionU8Max() {
test(b -> b.addMemoryInstruction(Bytecode.I32_LOAD, Bytecode.I32_LOAD_U8, Bytecode.I32_LOAD_I32, 0, 255, false), new byte[]{Bytecode.I32_LOAD_U8, (byte) 0xFF});
}
@Test
public void testMemoryInstructionI32Min() {
test(b -> b.addMemoryInstruction(Bytecode.I32_LOAD, Bytecode.I32_LOAD_U8, Bytecode.I32_LOAD_I32, 0, 256, false), new byte[]{Bytecode.I32_LOAD_I32, 0x00, 0x01, 0x00, 0x00});
}
@Test
public void testMemoryInstructionI32Max() {
test(b -> b.addMemoryInstruction(Bytecode.I32_LOAD, Bytecode.I32_LOAD_U8, Bytecode.I32_LOAD_I32, 0, 4294967295L, false),
new byte[]{Bytecode.I32_LOAD_I32, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF});
}
@Test
public void testMemoryInstructionIndexType64() {
test(b -> b.addMemoryInstruction(Bytecode.I32_LOAD, Bytecode.I32_LOAD_U8, Bytecode.I32_LOAD_I32, 0, 0, true), new byte[]{Bytecode.I32_LOAD, (byte) 0x81, 0x00, 0x00, 0x00, 0x00, 0x00});
}
@Test
public void testMemoryInstructionMinI64Offset() {
test(b -> b.addMemoryInstruction(Bytecode.I32_LOAD, Bytecode.I32_LOAD_U8, Bytecode.I32_LOAD_I32, 0, 4294967296L, false),
new byte[]{Bytecode.I32_LOAD, 0x08, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00});
}
@Test
public void testMemoryInstructionIndexType64MaxU8Offset() {
test(b -> b.addMemoryInstruction(Bytecode.I32_LOAD, Bytecode.I32_LOAD_U8, Bytecode.I32_LOAD_I32, 0, 255, true),
new byte[]{Bytecode.I32_LOAD, (byte) 0x81, 0x00, 0x00, 0x00, 0x00, (byte) 0xFF});
}
@Test
public void testMemoryInstructionIndexType64MinU32Offset() {
test(b -> b.addMemoryInstruction(Bytecode.I32_LOAD, Bytecode.I32_LOAD_U8, Bytecode.I32_LOAD_I32, 0, 256, true),
new byte[]{Bytecode.I32_LOAD, (byte) 0x84, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00});
}
@Test
public void testMemoryInstructionIndexType64MaxU32Offset() {
test(b -> b.addMemoryInstruction(Bytecode.I32_LOAD, Bytecode.I32_LOAD_U8, Bytecode.I32_LOAD_I32, 0, 4294967295L, true),
new byte[]{Bytecode.I32_LOAD, (byte) 0x84, 0x00, 0x00, 0x00, 0x00, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF});
}
@Test
public void testMemoryInstructionIndexType64MinI64Offset() {
test(b -> b.addMemoryInstruction(Bytecode.I32_LOAD, Bytecode.I32_LOAD_U8, Bytecode.I32_LOAD_I32, 0, 4294967296L, true),
new byte[]{Bytecode.I32_LOAD, (byte) 0x88, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00});
}
@Test
public void testMemoryInstructionInvalidOpcode() {
testAssertion(b -> b.addMemoryInstruction(256, Bytecode.I32_LOAD_U8, Bytecode.I32_LOAD_I32, 0, 1, false), "opcode does not fit into byte");
}
@Test
public void testMemoryInstructionInvalidOpcodeU8() {
testAssertion(b -> b.addMemoryInstruction(Bytecode.I32_LOAD, 256, Bytecode.I32_LOAD_I32, 0, 1, false), "opcode does not fit into byte");
}
@Test
public void testMemoryInstructionInvalidOpcodeI32() {
testAssertion(b -> b.addMemoryInstruction(Bytecode.I32_LOAD, Bytecode.I32_LOAD_U8, 256, 0, 1, false), "opcode does not fit into byte");
}
@Test
public void testAddMin() {
test(b -> b.addOp(0x00), new byte[]{0x00});
}
@Test
public void testAddMax() {
test(b -> b.addOp(0xFF), new byte[]{(byte) 0xFF});
}
@Test
public void testInvalidAdd() {
testAssertion(b -> b.addOp(256), "opcode does not fit into byte");
}
@Test
public void testAddImmediateMin() {
test(b -> b.addOp(0x01, 0), new byte[]{0x01, 0x00, 0x00, 0x00, 0x00});
}
@Test
public void testAddImmediateMax() {
test(b -> b.addOp(0x01, 0xFFFFFFFF), new byte[]{0x01, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF});
}
@Test
public void testInvalidAddImmediate() {
testAssertion(b -> b.addOp(256, 0), "opcode does not fit into byte");
}
@Test
public void testAddImmediate64Max() {
test(b -> b.addOp(0x01, 0xFFFFFFFFFFFFFFFFL), new byte[]{0x01, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF});
}
@Test
public void testInvalidAddImmediate64() {
testAssertion(b -> b.addOp(256, 0xFFL), "opcode does not fit into byte");
}
@Test
public void testAddImmediateMax2() {
test(b -> b.addOp(0x01, 0, 0xFFFFFFFF), new byte[]{0x01, 0x00, 0x00, 0x00, 0x00, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF});
}
@Test
public void testInvalidAddImmediate2() {
testAssertion(b -> b.addOp(256, 0, 0), "opcode does not fit into byte");
}
@Test
public void testActiveDataHeaderMinU8Length() {
test(b -> b.addDataHeader(0, null, -1, -1), new byte[]{0x40, 0x00});
}
@Test
public void testActiveDataHeaderMaxU8Length() {
test(b -> b.addDataHeader(255, null, -1, -1), new byte[]{0x40, (byte) 0xFF});
}
@Test
public void testActiveDataHeaderMinU16Length() {
test(b -> b.addDataHeader(256, null, -1, -1), new byte[]{(byte) 0x80, 0x00, 0x01});
}
@Test
public void testActiveDataHeaderMaxU16Length() {
test(b -> b.addDataHeader(65535, null, -1, -1), new byte[]{(byte) 0x80, (byte) 0xFF, (byte) 0xFF});
}
@Test
public void testActiveDataHeaderMinI32Length() {
test(b -> b.addDataHeader(65536, null, -1, -1), new byte[]{(byte) 0xC0, 0x00, 0x00, 0x01, 0x00});
}
private static byte[] byteArrayConcat(byte[] a, byte[] b) {
byte[] result = new byte[a.length + b.length];
System.arraycopy(a, 0, result, 0, a.length);
System.arraycopy(b, 0, result, a.length, b.length);
return result;
}
@Test
public void testActiveDataHeaderMaxU8OffsetBytecodeLength() {
byte[] offsetBytecode = new byte[255];
test(b -> b.addDataHeader(1, offsetBytecode, -1, -1),
byteArrayConcat(new byte[]{0x42, 0x01, (byte) 0xFF}, offsetBytecode));
}
@Test
public void testActiveDataHeaderMinU16OffsetBytecodeLength() {
byte[] offsetBytecode = new byte[256];
test(b -> b.addDataHeader(1, offsetBytecode, -1, -1),
byteArrayConcat(new byte[]{0x44, 0x01, 0x00, 0x01}, offsetBytecode));
}
@Test
public void testActiveDataHeaderMaxU16OffsetBytecodeLength() {
byte[] offsetBytecode = new byte[65535];
test(b -> b.addDataHeader(1, offsetBytecode, -1, -1),
byteArrayConcat(new byte[]{0x44, 0x01, (byte) 0xFF, (byte) 0xFF}, offsetBytecode));
}
@Test
public void testActiveDataHeaderMinI32OffsetBytecodeLength() {
byte[] offsetBytecode = new byte[65536];
test(b -> b.addDataHeader(1, offsetBytecode, -1, -1),
byteArrayConcat(new byte[]{0x46, 0x01, 0x00, 0x00, 0x01, 0x00}, offsetBytecode));
}
@Test
public void testActiveDataHeaderMaxU8OffsetAddress() {
test(b -> b.addDataHeader(1, null, 255, -1), new byte[]{0x52, 0x01, (byte) 0xFF});
}
@Test
public void testActiveDataHeaderMinU16OffsetAddress() {
test(b -> b.addDataHeader(1, null, 256, -1), new byte[]{0x54, 0x01, 0x00, 0x01});
}
@Test
public void testActiveDataHeaderMaxU16OffsetAddress() {
test(b -> b.addDataHeader(1, null, 65535, -1), new byte[]{0x54, 0x01, (byte) 0xFF, (byte) 0xFF});
}
@Test
public void testActiveDataHeaderMinU32OffsetAddress() {
test(b -> b.addDataHeader(1, null, 65536, -1), new byte[]{0x56, 0x01, 0x00, 0x00, 0x01, 0x00});
}
@Test
public void testActiveDataHeaderMaxU32OffsetAddress() {
test(b -> b.addDataHeader(1, null, 4294967295L, -1), new byte[]{0x56, 0x01, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF});
}
@Test
public void testActiveDataHeaderMinI64OffsetAddress() {
test(b -> b.addDataHeader(1, null, 4294967296L, -1), new byte[]{0x58, 0x01, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00});
}
@Test
public void testActiveDataHeaderGlobalIndexAndOffsetAddress() {
testAssertion(b -> b.addDataHeader(1, new byte[]{Bytecode.GLOBAL_GET_U8, 1}, 1, -1), "data header does not allow offset bytecode and offset address");
}
@Test
public void testPassiveDataHeaderMin() {
test(b -> b.addDataHeader(SegmentMode.PASSIVE, 1), new byte[]{0x41, 0x01});
}
@Test
public void testInvalidDataHeaderSegmentMode() {
testAssertion(b -> b.addDataHeader(3, 1), "invalid segment mode in data header");
}
@Test
public void testActiveModeInPassiveDataHeader() {
testAssertion(b -> b.addDataHeader(SegmentMode.ACTIVE, 1), "invalid active segment mode in passive data header");
}
@Test
public void testDataRuntimeHeaderMin() {
test(b -> b.addDataRuntimeHeader(1), new byte[]{0x01});
}
@Test
public void testDataRuntimeHeaderMaxInlineLength() {
test(b -> b.addDataRuntimeHeader(63), new byte[]{0x3F});
}
@Test
public void testDataRuntimeHeaderMinU8Length() {
test(b -> b.addDataRuntimeHeader(64), new byte[]{0x40, 0x40});
}
@Test
public void testDataRuntimeHeaderMaxU8Length() {
test(b -> b.addDataRuntimeHeader(255), new byte[]{0x40, (byte) 0xFF});
}
@Test
public void testDataRuntimeHeaderMinU16Length() {
test(b -> b.addDataRuntimeHeader(256), new byte[]{(byte) 0x80, 0x00, 0x01});
}
@Test
public void testDataRuntimeHeaderMaxU16Length() {
test(b -> b.addDataRuntimeHeader(65535), new byte[]{(byte) 0x80, (byte) 0xFF, (byte) 0xFF});
}
@Test
public void testDataRuntimeHeaderMinI32Length() {
test(b -> b.addDataRuntimeHeader(65536), new byte[]{(byte) 0xC0, 0x00, 0x00, 0x01, 0x00});
}
@Test
public void testElemHeaderMin() {
test(b -> b.addElemHeader(SegmentMode.ACTIVE, 0, WasmType.FUNCREF_TYPE, 0, null, -1), new byte[]{0x40, 0x10, 0x00});
}
@Test
public void testElemHeaderMinU8Count() {
test(b -> b.addElemHeader(SegmentMode.ACTIVE, 1, WasmType.FUNCREF_TYPE, 0, null, -1), new byte[]{0x40, 0x10, 0x01});
}
@Test
public void testElemHeaderMaxU8Count() {
test(b -> b.addElemHeader(SegmentMode.ACTIVE, 255, WasmType.FUNCREF_TYPE, 0, null, -1), new byte[]{0x40, 0x10, (byte) 0xFF});
}
@Test
public void testElemHeaderMinU16Count() {
test(b -> b.addElemHeader(SegmentMode.ACTIVE, 256, WasmType.FUNCREF_TYPE, 0, null, -1), new byte[]{(byte) 0x80, 0x10, 0x00, 0x01});
}
@Test
public void testElemHeaderMaxU16Count() {
test(b -> b.addElemHeader(SegmentMode.ACTIVE, 65535, WasmType.FUNCREF_TYPE, 0, null, -1), new byte[]{(byte) 0x80, 0x10, (byte) 0xFF, (byte) 0xFF});
}
@Test
public void testElemHeaderMinI32Count() {
test(b -> b.addElemHeader(SegmentMode.ACTIVE, 65536, WasmType.FUNCREF_TYPE, 0, null, -1), new byte[]{(byte) 0xC0, 0x10, 0x00, 0x00, 0x01, 0x00});
}
@Test
public void testElemHeaderPassive() {
test(b -> b.addElemHeader(SegmentMode.PASSIVE, 8, WasmType.FUNCREF_TYPE, 0, null, -1), new byte[]{0x40, 0x11, 0x08});
}
@Test
public void testElemHeaderDeclarative() {
test(b -> b.addElemHeader(SegmentMode.DECLARATIVE, 8, WasmType.FUNCREF_TYPE, 0, null, -1), new byte[]{0x40, 0x12, 0x08});
}
@Test
public void testElemHeaderExternref() {
test(b -> b.addElemHeader(SegmentMode.ACTIVE, 8, WasmType.EXTERNREF_TYPE, 0, null, -1), new byte[]{0x40, 0x20, 0x08});
}
@Test
public void testElemHeaderExnref() {
test(b -> b.addElemHeader(SegmentMode.ACTIVE, 8, WasmType.EXNREF_TYPE, 0, null, -1), new byte[]{0x40, 0x30, 0x08});
}
@Test
public void testElemHeaderMinU8TableIndex() {
test(b -> b.addElemHeader(SegmentMode.ACTIVE, 0, WasmType.FUNCREF_TYPE, 1, null, -1), new byte[]{0x50, 0x10, 0x00, 0x01});
}
@Test
public void testElemHeaderMaxU8TableIndex() {
test(b -> b.addElemHeader(SegmentMode.ACTIVE, 0, WasmType.FUNCREF_TYPE, 255, null, -1), new byte[]{0x50, 0x10, 0x00, (byte) 0xFF});
}
@Test
public void testElemHeaderMinU16TableIndex() {
test(b -> b.addElemHeader(SegmentMode.ACTIVE, 0, WasmType.FUNCREF_TYPE, 256, null, -1), new byte[]{0x60, 0x10, 0x00, 0x00, 0x01});
}
@Test
public void testElemHeaderMaxU16TableIndex() {
test(b -> b.addElemHeader(SegmentMode.ACTIVE, 0, WasmType.FUNCREF_TYPE, 65535, null, -1), new byte[]{0x60, 0x10, 0x00, (byte) 0xFF, (byte) 0xFF});
}
@Test
public void testElemHeaderMinI32TableIndex() {
test(b -> b.addElemHeader(SegmentMode.ACTIVE, 0, WasmType.FUNCREF_TYPE, 65536, null, -1), new byte[]{0x70, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00});
}
@Test
public void testElemHeaderMinU8OffsetBytecodeLength() {
byte[] offsetBytecode = new byte[0];
test(b -> b.addElemHeader(SegmentMode.ACTIVE, 0, WasmType.FUNCREF_TYPE, 0, offsetBytecode, -1),
byteArrayConcat(new byte[]{0x44, 0x10, 0x00, 0x00}, offsetBytecode));
}
@Test
public void testElemHeaderMaxU8OffsetBytecodeLength() {
byte[] offsetBytecode = new byte[255];
test(b -> b.addElemHeader(SegmentMode.ACTIVE, 0, WasmType.FUNCREF_TYPE, 0, offsetBytecode, -1),
byteArrayConcat(new byte[]{0x44, 0x10, 0x00, (byte) 0xFF}, offsetBytecode));
}
@Test
public void testElemHeaderMinU16OffsetBytecodeLength() {
byte[] offsetBytecode = new byte[256];
test(b -> b.addElemHeader(SegmentMode.ACTIVE, 0, WasmType.FUNCREF_TYPE, 0, offsetBytecode, -1),
byteArrayConcat(new byte[]{0x48, 0x10, 0x00, 0x00, 0x01}, offsetBytecode));
}
@Test
public void testElemHeaderMaxU16OffsetBytecodeLength() {
byte[] offsetBytecode = new byte[65535];
test(b -> b.addElemHeader(SegmentMode.ACTIVE, 0, WasmType.FUNCREF_TYPE, 0, offsetBytecode, -1),
byteArrayConcat(new byte[]{0x48, 0x10, 0x00, (byte) 0xFF, (byte) 0xFF}, offsetBytecode));
}
@Test
public void testElemHeaderMinI32OffsetBytecodeLength() {
byte[] offsetBytecode = new byte[65536];
test(b -> b.addElemHeader(SegmentMode.ACTIVE, 0, WasmType.FUNCREF_TYPE, 0, offsetBytecode, -1),
byteArrayConcat(new byte[]{0x4C, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00}, offsetBytecode));
}
@Test
public void testElemHeaderMinU8OffsetAddress() {
    assertElemHeaderOffsetAddress(0, new byte[]{0x41, 0x10, 0x00, 0x00});
}
@Test
public void testElemHeaderMaxU8OffsetAddress() {
    // 255 is the largest address that still fits the u8 form (0x41).
    assertElemHeaderOffsetAddress(255, new byte[]{0x41, 0x10, 0x00, (byte) 0xFF});
}
@Test
public void testElemHeaderMinU16OffsetAddress() {
    // 256 selects the u16 form (0x42); the address bytes are little-endian.
    assertElemHeaderOffsetAddress(256, new byte[]{0x42, 0x10, 0x00, 0x00, 0x01});
}
@Test
public void testElemHeaderMaxU16OffsetAddress() {
    assertElemHeaderOffsetAddress(65535, new byte[]{0x42, 0x10, 0x00, (byte) 0xFF, (byte) 0xFF});
}
@Test
public void testElemHeaderMinI32OffsetAddress() {
    // 65536 overflows u16 and selects the i32 form (0x43).
    assertElemHeaderOffsetAddress(65536, new byte[]{0x43, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00});
}

/**
 * Emits an active elem header whose offset is supplied as a constant address
 * (offset bytecode null = unused) and checks the exact header bytes.
 */
private void assertElemHeaderOffsetAddress(int offsetAddress, byte[] expectedBytes) {
    test(gen -> gen.addElemHeader(SegmentMode.ACTIVE, 0, WasmType.FUNCREF_TYPE, 0, null, offsetAddress),
        expectedBytes);
}
@Test
public void testInvalidElemHeaderGlobalIndexAndOffsetAddress() {
    // Supplying both an offset bytecode and an offset address must be rejected.
    testAssertion(
        gen -> gen.addElemHeader(
            SegmentMode.ACTIVE, 0, WasmType.FUNCREF_TYPE, 0, new byte[]{Bytecode.GLOBAL_GET_U8, 1}, 1),
        "elem header does not allow offset bytecode and offset address");
}
@Test
public void testInvalidElemHeaderSegmentMode() {
    // 4 is outside the range of defined segment modes.
    testAssertion(
        gen -> gen.addElemHeader(4, 0, WasmType.FUNCREF_TYPE, 0, null, 1),
        "invalid segment mode in elem header");
}
@Test
public void testInvalidElemHeaderElemType() {
    // A numeric value type (i32) is not a valid element type.
    testAssertion(
        gen -> gen.addElemHeader(SegmentMode.ACTIVE, 0, WasmType.I32_TYPE, 0, null, 1),
        "invalid elem type in elem header");
}
@Test
public void testElemNull() {
    // A null element entry is encoded as the single byte 0x10.
    test(gen -> gen.addElemNull(), new byte[]{0x10});
}
@Test
public void testElemMinFunctionIndex() {
    assertElemFunctionIndex(0, new byte[]{0x00});
}
@Test
public void testElemMaxInlineFunctionIndex() {
    // Indices up to 15 are packed inline into the opcode byte itself.
    assertElemFunctionIndex(15, new byte[]{0x0F});
}
@Test
public void testElemMinU8FunctionIndex() {
    // 16 no longer fits inline and selects the u8 form (0x20).
    assertElemFunctionIndex(16, new byte[]{0x20, 0x10});
}
@Test
public void testElemMaxU8FunctionIndex() {
    assertElemFunctionIndex(255, new byte[]{0x20, (byte) 0xFF});
}
@Test
public void testElemMinU16FunctionIndex() {
    // 256 selects the u16 form (0x40); index bytes are little-endian.
    assertElemFunctionIndex(256, new byte[]{0x40, 0x00, 0x01});
}
@Test
public void testElemMaxU16FunctionIndex() {
    assertElemFunctionIndex(65535, new byte[]{0x40, (byte) 0xFF, (byte) 0xFF});
}
@Test
public void testElemMinI32FunctionIndex() {
    // 65536 overflows u16 and selects the i32 form (0x60).
    assertElemFunctionIndex(65536, new byte[]{0x60, 0x00, 0x00, 0x01, 0x00});
}
@Test
public void testElemGlobalIndex() {
    // Global-index element entries use the 0xC0 opcode family.
    test(gen -> gen.addElemGlobalIndex(256), new byte[]{(byte) 0xC0, 0x00, 0x01});
}

/** Encodes a function-index element entry and checks the exact byte encoding. */
private void assertElemFunctionIndex(int functionIndex, byte[] expectedBytes) {
    test(gen -> gen.addElemFunctionIndex(functionIndex), expectedBytes);
}
/**
 * Encodes a code entry with the given fields and checks the exact byte encoding.
 * Each field widens the encoding (and shifts the opcode byte) as its value grows.
 */
private void assertCodeEntry(
        int functionIndex, int stackSize, int length, int locals, int results, byte[] expectedBytes) {
    test(gen -> gen.addCodeEntry(functionIndex, stackSize, length, locals, results), expectedBytes);
}

@Test
public void testCodeEntryMin() {
    // All-zero fields produce the minimal two-byte entry.
    assertCodeEntry(0, 0, 0, 0, 0, new byte[]{0x04, 0x00});
}
@Test
public void testCodeEntryMinU8FunctionIndex() {
    assertCodeEntry(1, 0, 0, 0, 0, new byte[]{0x44, 0x01, 0x00});
}
@Test
public void testCodeEntryMaxU8FunctionIndex() {
    assertCodeEntry(255, 0, 0, 0, 0, new byte[]{0x44, (byte) 0xFF, 0x00});
}
@Test
public void testCodeEntryMinU16FunctionIndex() {
    // Little-endian u16 function index.
    assertCodeEntry(256, 0, 0, 0, 0, new byte[]{(byte) 0x84, 0x00, 0x01, 0x00});
}
@Test
public void testCodeEntryMaxU16FunctionIndex() {
    assertCodeEntry(65535, 0, 0, 0, 0, new byte[]{(byte) 0x84, (byte) 0xFF, (byte) 0xFF, 0x00});
}
@Test
public void testCodeEntryMinI32FunctionIndex() {
    assertCodeEntry(65536, 0, 0, 0, 0, new byte[]{(byte) 0xC4, 0x00, 0x00, 0x01, 0x00, 0x00});
}
@Test
public void testCodeEntryMinU8StackSize() {
    assertCodeEntry(0, 1, 0, 0, 0, new byte[]{0x14, 0x01, 0x00});
}
@Test
public void testCodeEntryMaxU8StackSize() {
    assertCodeEntry(0, 255, 0, 0, 0, new byte[]{0x14, (byte) 0xFF, 0x00});
}
@Test
public void testCodeEntryMinU16StackSize() {
    assertCodeEntry(0, 256, 0, 0, 0, new byte[]{0x24, 0x00, 0x01, 0x00});
}
@Test
public void testCodeEntryMaxU16StackSize() {
    assertCodeEntry(0, 65535, 0, 0, 0, new byte[]{0x24, (byte) 0xFF, (byte) 0xFF, 0x00});
}
@Test
public void testCodeEntryMinI32StackSize() {
    assertCodeEntry(0, 65536, 0, 0, 0, new byte[]{0x34, 0x00, 0x00, 0x01, 0x00, 0x00});
}
@Test
public void testCodeEntryMaxU8Length() {
    // Length up to 255 still fits the minimal opcode form (0x04).
    assertCodeEntry(0, 0, 255, 0, 0, new byte[]{0x04, (byte) 0xFF});
}
@Test
public void testCodeEntryMinU16Length() {
    assertCodeEntry(0, 0, 256, 0, 0, new byte[]{0x08, 0x00, 0x01});
}
@Test
public void testCodeEntryMaxU16Length() {
    assertCodeEntry(0, 0, 65535, 0, 0, new byte[]{0x08, (byte) 0xFF, (byte) 0xFF});
}
@Test
public void testCodeEntryMinI32Length() {
    assertCodeEntry(0, 0, 65536, 0, 0, new byte[]{0x0C, 0x00, 0x00, 0x01, 0x00});
}
@Test
public void testCodeEntryLocals() {
    // Presence of locals is signalled via a flag bit in the opcode byte (0x04 -> 0x06).
    assertCodeEntry(0, 0, 0, 1, 0, new byte[]{0x06, 0x00});
}
@Test
public void testCodeEntryResults() {
    // Presence of results is signalled via a flag bit in the opcode byte (0x04 -> 0x05).
    assertCodeEntry(0, 0, 0, 0, 1, new byte[]{0x05, 0x00});
}
@Test
public void testCatchExceptionHandler() {
    // Layout: start (i32 LE), end (i32 LE), handler-type byte (CATCH = 0x00),
    // then the remaining two i32 operands (0 and 10) little-endian.
    byte[] expected = {
        0x05, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x00, 0x00
    };
    test(gen -> gen.addExceptionHandler(5, 10, ExceptionHandlerType.CATCH, 0, 10), expected);
}
@Test
public void testCatchRefExceptionHandler() {
    // Same layout with handler-type byte CATCH_REF = 0x01 and operands 1 and 256.
    byte[] expected = {
        0x00, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x01,
        0x01, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00
    };
    test(gen -> gen.addExceptionHandler(0, 12, ExceptionHandlerType.CATCH_REF, 1, 256), expected);
}
}
|
googleapis/google-cloud-java | 34,970 | java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateModelMonitorRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1beta1/model_monitoring_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1beta1;
/**
*
*
* <pre>
* Request message for
* [ModelMonitoringService.UpdateModelMonitor][google.cloud.aiplatform.v1beta1.ModelMonitoringService.UpdateModelMonitor].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest}
*/
public final class UpdateModelMonitorRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest)
UpdateModelMonitorRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateModelMonitorRequest.newBuilder() to construct.
// Built only through the Builder; instances are immutable once constructed.
private UpdateModelMonitorRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Creates the empty message used for DEFAULT_INSTANCE and by newInstance().
private UpdateModelMonitorRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateModelMonitorRequest();
}
// Descriptor for reflective access to this message type.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1beta1.ModelMonitoringServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_UpdateModelMonitorRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1beta1.ModelMonitoringServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_UpdateModelMonitorRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest.class,
com.google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest.Builder.class);
}
// Presence bits for the message fields: bit 0 = model_monitor, bit 1 = update_mask.
private int bitField0_;
public static final int MODEL_MONITOR_FIELD_NUMBER = 1;
private com.google.cloud.aiplatform.v1beta1.ModelMonitor modelMonitor_;
/**
*
*
* <pre>
* Required. The model monitoring configuration which replaces the resource on
* the server.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.ModelMonitor model_monitor = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the modelMonitor field is set.
*/
@java.lang.Override
public boolean hasModelMonitor() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The model monitoring configuration which replaces the resource on
* the server.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.ModelMonitor model_monitor = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The modelMonitor.
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.ModelMonitor getModelMonitor() {
// Never returns null: falls back to the default instance when the field is unset.
return modelMonitor_ == null
? com.google.cloud.aiplatform.v1beta1.ModelMonitor.getDefaultInstance()
: modelMonitor_;
}
/**
*
*
* <pre>
* Required. The model monitoring configuration which replaces the resource on
* the server.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.ModelMonitor model_monitor = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.ModelMonitorOrBuilder getModelMonitorOrBuilder() {
return modelMonitor_ == null
? com.google.cloud.aiplatform.v1beta1.ModelMonitor.getDefaultInstance()
: modelMonitor_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Required. Mask specifying which fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. Mask specifying which fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
// Never returns null: falls back to the default instance when the field is unset.
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Required. Mask specifying which fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
// Memoized result of isInitialized(): -1 = not computed, 1 = initialized, 0 = not initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// No required proto2-style fields to check, so the message is always initialized.
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
// Only fields whose presence bit is set are written to the wire.
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getModelMonitor());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getUpdateMask());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
// Size is memoized; -1 marks "not yet computed".
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getModelMonitor());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
// Value-based equality over both fields (including presence) plus unknown fields.
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest)) {
return super.equals(obj);
}
com.google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest other =
(com.google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest) obj;
if (hasModelMonitor() != other.hasModelMonitor()) return false;
if (hasModelMonitor()) {
if (!getModelMonitor().equals(other.getModelMonitor())) return false;
}
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
// Memoized (0 means "not yet computed"); safe because the message is immutable.
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasModelMonitor()) {
hash = (37 * hash) + MODEL_MONITOR_FIELD_NUMBER;
hash = (53 * hash) + getModelMonitor().hashCode();
}
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard parseFrom overloads; all delegate to PARSER or the GeneratedMessageV3 I/O helpers.
public static com.google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Delimited variants read a length prefix first, allowing several messages per stream.
public static com.google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Returns a builder pre-populated with the prototype's fields.
public static Builder newBuilder(
com.google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// The default instance has nothing to merge, so a fresh builder suffices.
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for
* [ModelMonitoringService.UpdateModelMonitor][google.cloud.aiplatform.v1beta1.ModelMonitoringService.UpdateModelMonitor].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest)
com.google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1beta1.ModelMonitoringServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_UpdateModelMonitorRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1beta1.ModelMonitoringServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_UpdateModelMonitorRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest.class,
com.google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest.Builder.class);
}
// Construct using com.google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getModelMonitorFieldBuilder();
getUpdateMaskFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
modelMonitor_ = null;
if (modelMonitorBuilder_ != null) {
modelMonitorBuilder_.dispose();
modelMonitorBuilder_ = null;
}
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.aiplatform.v1beta1.ModelMonitoringServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_UpdateModelMonitorRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest
getDefaultInstanceForType() {
return com.google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest build() {
com.google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest buildPartial() {
com.google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest result =
new com.google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.modelMonitor_ =
modelMonitorBuilder_ == null ? modelMonitor_ : modelMonitorBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest) {
return mergeFrom((com.google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest other) {
if (other
== com.google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest.getDefaultInstance())
return this;
if (other.hasModelMonitor()) {
mergeModelMonitor(other.getModelMonitor());
}
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getModelMonitorFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.cloud.aiplatform.v1beta1.ModelMonitor modelMonitor_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1beta1.ModelMonitor,
com.google.cloud.aiplatform.v1beta1.ModelMonitor.Builder,
com.google.cloud.aiplatform.v1beta1.ModelMonitorOrBuilder>
modelMonitorBuilder_;
/**
*
*
* <pre>
* Required. The model monitoring configuration which replaces the resource on
* the server.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.ModelMonitor model_monitor = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the modelMonitor field is set.
*/
public boolean hasModelMonitor() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The model monitoring configuration which replaces the resource on
* the server.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.ModelMonitor model_monitor = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The modelMonitor.
*/
public com.google.cloud.aiplatform.v1beta1.ModelMonitor getModelMonitor() {
if (modelMonitorBuilder_ == null) {
return modelMonitor_ == null
? com.google.cloud.aiplatform.v1beta1.ModelMonitor.getDefaultInstance()
: modelMonitor_;
} else {
return modelMonitorBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The model monitoring configuration which replaces the resource on
* the server.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.ModelMonitor model_monitor = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setModelMonitor(com.google.cloud.aiplatform.v1beta1.ModelMonitor value) {
if (modelMonitorBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
modelMonitor_ = value;
} else {
modelMonitorBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The model monitoring configuration which replaces the resource on
* the server.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.ModelMonitor model_monitor = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setModelMonitor(
com.google.cloud.aiplatform.v1beta1.ModelMonitor.Builder builderForValue) {
if (modelMonitorBuilder_ == null) {
modelMonitor_ = builderForValue.build();
} else {
modelMonitorBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The model monitoring configuration which replaces the resource on
* the server.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.ModelMonitor model_monitor = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeModelMonitor(com.google.cloud.aiplatform.v1beta1.ModelMonitor value) {
if (modelMonitorBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& modelMonitor_ != null
&& modelMonitor_
!= com.google.cloud.aiplatform.v1beta1.ModelMonitor.getDefaultInstance()) {
getModelMonitorBuilder().mergeFrom(value);
} else {
modelMonitor_ = value;
}
} else {
modelMonitorBuilder_.mergeFrom(value);
}
if (modelMonitor_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The model monitoring configuration which replaces the resource on
* the server.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.ModelMonitor model_monitor = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearModelMonitor() {
bitField0_ = (bitField0_ & ~0x00000001);
modelMonitor_ = null;
if (modelMonitorBuilder_ != null) {
modelMonitorBuilder_.dispose();
modelMonitorBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The model monitoring configuration which replaces the resource on
* the server.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.ModelMonitor model_monitor = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.aiplatform.v1beta1.ModelMonitor.Builder getModelMonitorBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getModelMonitorFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The model monitoring configuration which replaces the resource on
* the server.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.ModelMonitor model_monitor = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.aiplatform.v1beta1.ModelMonitorOrBuilder getModelMonitorOrBuilder() {
if (modelMonitorBuilder_ != null) {
return modelMonitorBuilder_.getMessageOrBuilder();
} else {
return modelMonitor_ == null
? com.google.cloud.aiplatform.v1beta1.ModelMonitor.getDefaultInstance()
: modelMonitor_;
}
}
/**
*
*
* <pre>
* Required. The model monitoring configuration which replaces the resource on
* the server.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.ModelMonitor model_monitor = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1beta1.ModelMonitor,
com.google.cloud.aiplatform.v1beta1.ModelMonitor.Builder,
com.google.cloud.aiplatform.v1beta1.ModelMonitorOrBuilder>
getModelMonitorFieldBuilder() {
if (modelMonitorBuilder_ == null) {
modelMonitorBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1beta1.ModelMonitor,
com.google.cloud.aiplatform.v1beta1.ModelMonitor.Builder,
com.google.cloud.aiplatform.v1beta1.ModelMonitorOrBuilder>(
getModelMonitor(), getParentForChildren(), isClean());
modelMonitor_ = null;
}
return modelMonitorBuilder_;
}
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* Required. Mask specifying which fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. Mask specifying which fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. Mask specifying which fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Mask specifying which fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Mask specifying which fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& updateMask_ != null
&& updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
getUpdateMaskBuilder().mergeFrom(value);
} else {
updateMask_ = value;
}
} else {
updateMaskBuilder_.mergeFrom(value);
}
if (updateMask_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. Mask specifying which fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000002);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Mask specifying which fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. Mask specifying which fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* Required. Mask specifying which fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // Lazily creates the SingleFieldBuilderV3 for update_mask. Once created,
    // the builder becomes the single source of truth and the plain message
    // field is nulled out so the two representations cannot diverge.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      if (updateMaskBuilder_ == null) {
        updateMaskBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.FieldMask,
                com.google.protobuf.FieldMask.Builder,
                com.google.protobuf.FieldMaskOrBuilder>(
                // Seed the builder with the current value and wire it to this
                // builder's change-notification chain.
                getUpdateMask(), getParentForChildren(), isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }
    // Replaces the unknown-field set wholesale; delegates to the superclass.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    // Merges additional unknown fields into the existing set; delegates to the
    // superclass.
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest)
  // Singleton default instance of the message, created once at class-load time
  // and shared by all callers.
  private static final com.google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest();
  }

  // Returns the shared immutable default instance of this message type.
  public static com.google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser for UpdateModelMonitorRequest. Parses via a fresh
  // Builder and, on any failure, attaches the partially parsed message to the
  // thrown InvalidProtocolBufferException so callers can inspect what was read.
  private static final com.google.protobuf.Parser<UpdateModelMonitorRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateModelMonitorRequest>() {
        @java.lang.Override
        public UpdateModelMonitorRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Re-throw, preserving the bytes parsed so far.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            // Required-field violation: convert to the protocol-buffer
            // exception type expected by callers.
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap raw I/O failures, keeping the cause chain intact.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Static accessor for the shared parser instance.
  public static com.google.protobuf.Parser<UpdateModelMonitorRequest> parser() {
    return PARSER;
  }

  // Instance accessor for the parser, required by the GeneratedMessageV3
  // contract.
  @java.lang.Override
  public com.google.protobuf.Parser<UpdateModelMonitorRequest> getParserForType() {
    return PARSER;
  }

  // Instance accessor for the default instance, required by the message
  // contract.
  @java.lang.Override
  public com.google.cloud.aiplatform.v1beta1.UpdateModelMonitorRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 35,231 | java-network-security/google-cloud-network-security/src/main/java/com/google/cloud/networksecurity/v1/stub/GrpcAddressGroupServiceStub.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.networksecurity.v1.stub;
import static com.google.cloud.networksecurity.v1.AddressGroupServiceClient.ListAddressGroupReferencesPagedResponse;
import static com.google.cloud.networksecurity.v1.AddressGroupServiceClient.ListAddressGroupsPagedResponse;
import static com.google.cloud.networksecurity.v1.AddressGroupServiceClient.ListLocationsPagedResponse;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.grpc.GrpcCallSettings;
import com.google.api.gax.grpc.GrpcStubCallableFactory;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallable;
import com.google.api.gax.rpc.RequestParamsBuilder;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.location.GetLocationRequest;
import com.google.cloud.location.ListLocationsRequest;
import com.google.cloud.location.ListLocationsResponse;
import com.google.cloud.location.Location;
import com.google.cloud.networksecurity.v1.AddAddressGroupItemsRequest;
import com.google.cloud.networksecurity.v1.AddressGroup;
import com.google.cloud.networksecurity.v1.CloneAddressGroupItemsRequest;
import com.google.cloud.networksecurity.v1.CreateAddressGroupRequest;
import com.google.cloud.networksecurity.v1.DeleteAddressGroupRequest;
import com.google.cloud.networksecurity.v1.GetAddressGroupRequest;
import com.google.cloud.networksecurity.v1.ListAddressGroupReferencesRequest;
import com.google.cloud.networksecurity.v1.ListAddressGroupReferencesResponse;
import com.google.cloud.networksecurity.v1.ListAddressGroupsRequest;
import com.google.cloud.networksecurity.v1.ListAddressGroupsResponse;
import com.google.cloud.networksecurity.v1.OperationMetadata;
import com.google.cloud.networksecurity.v1.RemoveAddressGroupItemsRequest;
import com.google.cloud.networksecurity.v1.UpdateAddressGroupRequest;
import com.google.iam.v1.GetIamPolicyRequest;
import com.google.iam.v1.Policy;
import com.google.iam.v1.SetIamPolicyRequest;
import com.google.iam.v1.TestIamPermissionsRequest;
import com.google.iam.v1.TestIamPermissionsResponse;
import com.google.longrunning.Operation;
import com.google.longrunning.stub.GrpcOperationsStub;
import com.google.protobuf.Empty;
import io.grpc.MethodDescriptor;
import io.grpc.protobuf.ProtoUtils;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* gRPC stub implementation for the AddressGroupService service API.
*
* <p>This class is for advanced usage and reflects the underlying API directly.
*/
@Generated("by gapic-generator-java")
public class GrpcAddressGroupServiceStub extends AddressGroupServiceStub {
private static final MethodDescriptor<ListAddressGroupsRequest, ListAddressGroupsResponse>
listAddressGroupsMethodDescriptor =
MethodDescriptor.<ListAddressGroupsRequest, ListAddressGroupsResponse>newBuilder()
.setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName(
"google.cloud.networksecurity.v1.AddressGroupService/ListAddressGroups")
.setRequestMarshaller(
ProtoUtils.marshaller(ListAddressGroupsRequest.getDefaultInstance()))
.setResponseMarshaller(
ProtoUtils.marshaller(ListAddressGroupsResponse.getDefaultInstance()))
.setSampledToLocalTracing(true)
.build();
private static final MethodDescriptor<GetAddressGroupRequest, AddressGroup>
getAddressGroupMethodDescriptor =
MethodDescriptor.<GetAddressGroupRequest, AddressGroup>newBuilder()
.setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName(
"google.cloud.networksecurity.v1.AddressGroupService/GetAddressGroup")
.setRequestMarshaller(
ProtoUtils.marshaller(GetAddressGroupRequest.getDefaultInstance()))
.setResponseMarshaller(ProtoUtils.marshaller(AddressGroup.getDefaultInstance()))
.setSampledToLocalTracing(true)
.build();
private static final MethodDescriptor<CreateAddressGroupRequest, Operation>
createAddressGroupMethodDescriptor =
MethodDescriptor.<CreateAddressGroupRequest, Operation>newBuilder()
.setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName(
"google.cloud.networksecurity.v1.AddressGroupService/CreateAddressGroup")
.setRequestMarshaller(
ProtoUtils.marshaller(CreateAddressGroupRequest.getDefaultInstance()))
.setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
.setSampledToLocalTracing(true)
.build();
private static final MethodDescriptor<UpdateAddressGroupRequest, Operation>
updateAddressGroupMethodDescriptor =
MethodDescriptor.<UpdateAddressGroupRequest, Operation>newBuilder()
.setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName(
"google.cloud.networksecurity.v1.AddressGroupService/UpdateAddressGroup")
.setRequestMarshaller(
ProtoUtils.marshaller(UpdateAddressGroupRequest.getDefaultInstance()))
.setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
.setSampledToLocalTracing(true)
.build();
private static final MethodDescriptor<AddAddressGroupItemsRequest, Operation>
addAddressGroupItemsMethodDescriptor =
MethodDescriptor.<AddAddressGroupItemsRequest, Operation>newBuilder()
.setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName(
"google.cloud.networksecurity.v1.AddressGroupService/AddAddressGroupItems")
.setRequestMarshaller(
ProtoUtils.marshaller(AddAddressGroupItemsRequest.getDefaultInstance()))
.setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
.setSampledToLocalTracing(true)
.build();
private static final MethodDescriptor<RemoveAddressGroupItemsRequest, Operation>
removeAddressGroupItemsMethodDescriptor =
MethodDescriptor.<RemoveAddressGroupItemsRequest, Operation>newBuilder()
.setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName(
"google.cloud.networksecurity.v1.AddressGroupService/RemoveAddressGroupItems")
.setRequestMarshaller(
ProtoUtils.marshaller(RemoveAddressGroupItemsRequest.getDefaultInstance()))
.setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
.setSampledToLocalTracing(true)
.build();
private static final MethodDescriptor<CloneAddressGroupItemsRequest, Operation>
cloneAddressGroupItemsMethodDescriptor =
MethodDescriptor.<CloneAddressGroupItemsRequest, Operation>newBuilder()
.setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName(
"google.cloud.networksecurity.v1.AddressGroupService/CloneAddressGroupItems")
.setRequestMarshaller(
ProtoUtils.marshaller(CloneAddressGroupItemsRequest.getDefaultInstance()))
.setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
.setSampledToLocalTracing(true)
.build();
private static final MethodDescriptor<DeleteAddressGroupRequest, Operation>
deleteAddressGroupMethodDescriptor =
MethodDescriptor.<DeleteAddressGroupRequest, Operation>newBuilder()
.setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName(
"google.cloud.networksecurity.v1.AddressGroupService/DeleteAddressGroup")
.setRequestMarshaller(
ProtoUtils.marshaller(DeleteAddressGroupRequest.getDefaultInstance()))
.setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
.setSampledToLocalTracing(true)
.build();
private static final MethodDescriptor<
ListAddressGroupReferencesRequest, ListAddressGroupReferencesResponse>
listAddressGroupReferencesMethodDescriptor =
MethodDescriptor
.<ListAddressGroupReferencesRequest, ListAddressGroupReferencesResponse>newBuilder()
.setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName(
"google.cloud.networksecurity.v1.AddressGroupService/ListAddressGroupReferences")
.setRequestMarshaller(
ProtoUtils.marshaller(ListAddressGroupReferencesRequest.getDefaultInstance()))
.setResponseMarshaller(
ProtoUtils.marshaller(ListAddressGroupReferencesResponse.getDefaultInstance()))
.setSampledToLocalTracing(true)
.build();
private static final MethodDescriptor<ListLocationsRequest, ListLocationsResponse>
listLocationsMethodDescriptor =
MethodDescriptor.<ListLocationsRequest, ListLocationsResponse>newBuilder()
.setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName("google.cloud.location.Locations/ListLocations")
.setRequestMarshaller(
ProtoUtils.marshaller(ListLocationsRequest.getDefaultInstance()))
.setResponseMarshaller(
ProtoUtils.marshaller(ListLocationsResponse.getDefaultInstance()))
.setSampledToLocalTracing(true)
.build();
private static final MethodDescriptor<GetLocationRequest, Location> getLocationMethodDescriptor =
MethodDescriptor.<GetLocationRequest, Location>newBuilder()
.setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName("google.cloud.location.Locations/GetLocation")
.setRequestMarshaller(ProtoUtils.marshaller(GetLocationRequest.getDefaultInstance()))
.setResponseMarshaller(ProtoUtils.marshaller(Location.getDefaultInstance()))
.setSampledToLocalTracing(true)
.build();
private static final MethodDescriptor<SetIamPolicyRequest, Policy> setIamPolicyMethodDescriptor =
MethodDescriptor.<SetIamPolicyRequest, Policy>newBuilder()
.setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName("google.iam.v1.IAMPolicy/SetIamPolicy")
.setRequestMarshaller(ProtoUtils.marshaller(SetIamPolicyRequest.getDefaultInstance()))
.setResponseMarshaller(ProtoUtils.marshaller(Policy.getDefaultInstance()))
.setSampledToLocalTracing(true)
.build();
private static final MethodDescriptor<GetIamPolicyRequest, Policy> getIamPolicyMethodDescriptor =
MethodDescriptor.<GetIamPolicyRequest, Policy>newBuilder()
.setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName("google.iam.v1.IAMPolicy/GetIamPolicy")
.setRequestMarshaller(ProtoUtils.marshaller(GetIamPolicyRequest.getDefaultInstance()))
.setResponseMarshaller(ProtoUtils.marshaller(Policy.getDefaultInstance()))
.setSampledToLocalTracing(true)
.build();
private static final MethodDescriptor<TestIamPermissionsRequest, TestIamPermissionsResponse>
testIamPermissionsMethodDescriptor =
MethodDescriptor.<TestIamPermissionsRequest, TestIamPermissionsResponse>newBuilder()
.setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName("google.iam.v1.IAMPolicy/TestIamPermissions")
.setRequestMarshaller(
ProtoUtils.marshaller(TestIamPermissionsRequest.getDefaultInstance()))
.setResponseMarshaller(
ProtoUtils.marshaller(TestIamPermissionsResponse.getDefaultInstance()))
.setSampledToLocalTracing(true)
.build();
private final UnaryCallable<ListAddressGroupsRequest, ListAddressGroupsResponse>
listAddressGroupsCallable;
private final UnaryCallable<ListAddressGroupsRequest, ListAddressGroupsPagedResponse>
listAddressGroupsPagedCallable;
private final UnaryCallable<GetAddressGroupRequest, AddressGroup> getAddressGroupCallable;
private final UnaryCallable<CreateAddressGroupRequest, Operation> createAddressGroupCallable;
private final OperationCallable<CreateAddressGroupRequest, AddressGroup, OperationMetadata>
createAddressGroupOperationCallable;
private final UnaryCallable<UpdateAddressGroupRequest, Operation> updateAddressGroupCallable;
private final OperationCallable<UpdateAddressGroupRequest, AddressGroup, OperationMetadata>
updateAddressGroupOperationCallable;
private final UnaryCallable<AddAddressGroupItemsRequest, Operation> addAddressGroupItemsCallable;
private final OperationCallable<AddAddressGroupItemsRequest, AddressGroup, OperationMetadata>
addAddressGroupItemsOperationCallable;
private final UnaryCallable<RemoveAddressGroupItemsRequest, Operation>
removeAddressGroupItemsCallable;
private final OperationCallable<RemoveAddressGroupItemsRequest, AddressGroup, OperationMetadata>
removeAddressGroupItemsOperationCallable;
private final UnaryCallable<CloneAddressGroupItemsRequest, Operation>
cloneAddressGroupItemsCallable;
private final OperationCallable<CloneAddressGroupItemsRequest, AddressGroup, OperationMetadata>
cloneAddressGroupItemsOperationCallable;
private final UnaryCallable<DeleteAddressGroupRequest, Operation> deleteAddressGroupCallable;
private final OperationCallable<DeleteAddressGroupRequest, Empty, OperationMetadata>
deleteAddressGroupOperationCallable;
private final UnaryCallable<ListAddressGroupReferencesRequest, ListAddressGroupReferencesResponse>
listAddressGroupReferencesCallable;
private final UnaryCallable<
ListAddressGroupReferencesRequest, ListAddressGroupReferencesPagedResponse>
listAddressGroupReferencesPagedCallable;
private final UnaryCallable<ListLocationsRequest, ListLocationsResponse> listLocationsCallable;
private final UnaryCallable<ListLocationsRequest, ListLocationsPagedResponse>
listLocationsPagedCallable;
private final UnaryCallable<GetLocationRequest, Location> getLocationCallable;
private final UnaryCallable<SetIamPolicyRequest, Policy> setIamPolicyCallable;
private final UnaryCallable<GetIamPolicyRequest, Policy> getIamPolicyCallable;
private final UnaryCallable<TestIamPermissionsRequest, TestIamPermissionsResponse>
testIamPermissionsCallable;
private final BackgroundResource backgroundResources;
private final GrpcOperationsStub operationsStub;
private final GrpcStubCallableFactory callableFactory;
public static final GrpcAddressGroupServiceStub create(AddressGroupServiceStubSettings settings)
throws IOException {
return new GrpcAddressGroupServiceStub(settings, ClientContext.create(settings));
}
public static final GrpcAddressGroupServiceStub create(ClientContext clientContext)
throws IOException {
return new GrpcAddressGroupServiceStub(
AddressGroupServiceStubSettings.newBuilder().build(), clientContext);
}
public static final GrpcAddressGroupServiceStub create(
ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException {
return new GrpcAddressGroupServiceStub(
AddressGroupServiceStubSettings.newBuilder().build(), clientContext, callableFactory);
}
/**
* Constructs an instance of GrpcAddressGroupServiceStub, using the given settings. This is
* protected so that it is easy to make a subclass, but otherwise, the static factory methods
* should be preferred.
*/
protected GrpcAddressGroupServiceStub(
AddressGroupServiceStubSettings settings, ClientContext clientContext) throws IOException {
this(settings, clientContext, new GrpcAddressGroupServiceCallableFactory());
}
/**
* Constructs an instance of GrpcAddressGroupServiceStub, using the given settings. This is
* protected so that it is easy to make a subclass, but otherwise, the static factory methods
* should be preferred.
*/
protected GrpcAddressGroupServiceStub(
AddressGroupServiceStubSettings settings,
ClientContext clientContext,
GrpcStubCallableFactory callableFactory)
throws IOException {
this.callableFactory = callableFactory;
this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory);
GrpcCallSettings<ListAddressGroupsRequest, ListAddressGroupsResponse>
listAddressGroupsTransportSettings =
GrpcCallSettings.<ListAddressGroupsRequest, ListAddressGroupsResponse>newBuilder()
.setMethodDescriptor(listAddressGroupsMethodDescriptor)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("parent", String.valueOf(request.getParent()));
return builder.build();
})
.build();
GrpcCallSettings<GetAddressGroupRequest, AddressGroup> getAddressGroupTransportSettings =
GrpcCallSettings.<GetAddressGroupRequest, AddressGroup>newBuilder()
.setMethodDescriptor(getAddressGroupMethodDescriptor)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("name", String.valueOf(request.getName()));
return builder.build();
})
.build();
GrpcCallSettings<CreateAddressGroupRequest, Operation> createAddressGroupTransportSettings =
GrpcCallSettings.<CreateAddressGroupRequest, Operation>newBuilder()
.setMethodDescriptor(createAddressGroupMethodDescriptor)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("parent", String.valueOf(request.getParent()));
return builder.build();
})
.build();
GrpcCallSettings<UpdateAddressGroupRequest, Operation> updateAddressGroupTransportSettings =
GrpcCallSettings.<UpdateAddressGroupRequest, Operation>newBuilder()
.setMethodDescriptor(updateAddressGroupMethodDescriptor)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add(
"address_group.name", String.valueOf(request.getAddressGroup().getName()));
return builder.build();
})
.build();
GrpcCallSettings<AddAddressGroupItemsRequest, Operation> addAddressGroupItemsTransportSettings =
GrpcCallSettings.<AddAddressGroupItemsRequest, Operation>newBuilder()
.setMethodDescriptor(addAddressGroupItemsMethodDescriptor)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("address_group", String.valueOf(request.getAddressGroup()));
return builder.build();
})
.build();
GrpcCallSettings<RemoveAddressGroupItemsRequest, Operation>
removeAddressGroupItemsTransportSettings =
GrpcCallSettings.<RemoveAddressGroupItemsRequest, Operation>newBuilder()
.setMethodDescriptor(removeAddressGroupItemsMethodDescriptor)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("address_group", String.valueOf(request.getAddressGroup()));
return builder.build();
})
.build();
GrpcCallSettings<CloneAddressGroupItemsRequest, Operation>
cloneAddressGroupItemsTransportSettings =
GrpcCallSettings.<CloneAddressGroupItemsRequest, Operation>newBuilder()
.setMethodDescriptor(cloneAddressGroupItemsMethodDescriptor)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("address_group", String.valueOf(request.getAddressGroup()));
return builder.build();
})
.build();
GrpcCallSettings<DeleteAddressGroupRequest, Operation> deleteAddressGroupTransportSettings =
GrpcCallSettings.<DeleteAddressGroupRequest, Operation>newBuilder()
.setMethodDescriptor(deleteAddressGroupMethodDescriptor)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("name", String.valueOf(request.getName()));
return builder.build();
})
.build();
GrpcCallSettings<ListAddressGroupReferencesRequest, ListAddressGroupReferencesResponse>
listAddressGroupReferencesTransportSettings =
GrpcCallSettings
.<ListAddressGroupReferencesRequest, ListAddressGroupReferencesResponse>newBuilder()
.setMethodDescriptor(listAddressGroupReferencesMethodDescriptor)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("address_group", String.valueOf(request.getAddressGroup()));
return builder.build();
})
.build();
GrpcCallSettings<ListLocationsRequest, ListLocationsResponse> listLocationsTransportSettings =
GrpcCallSettings.<ListLocationsRequest, ListLocationsResponse>newBuilder()
.setMethodDescriptor(listLocationsMethodDescriptor)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("name", String.valueOf(request.getName()));
return builder.build();
})
.build();
GrpcCallSettings<GetLocationRequest, Location> getLocationTransportSettings =
GrpcCallSettings.<GetLocationRequest, Location>newBuilder()
.setMethodDescriptor(getLocationMethodDescriptor)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("name", String.valueOf(request.getName()));
return builder.build();
})
.build();
GrpcCallSettings<SetIamPolicyRequest, Policy> setIamPolicyTransportSettings =
GrpcCallSettings.<SetIamPolicyRequest, Policy>newBuilder()
.setMethodDescriptor(setIamPolicyMethodDescriptor)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("resource", String.valueOf(request.getResource()));
return builder.build();
})
.build();
GrpcCallSettings<GetIamPolicyRequest, Policy> getIamPolicyTransportSettings =
GrpcCallSettings.<GetIamPolicyRequest, Policy>newBuilder()
.setMethodDescriptor(getIamPolicyMethodDescriptor)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("resource", String.valueOf(request.getResource()));
return builder.build();
})
.build();
GrpcCallSettings<TestIamPermissionsRequest, TestIamPermissionsResponse>
testIamPermissionsTransportSettings =
GrpcCallSettings.<TestIamPermissionsRequest, TestIamPermissionsResponse>newBuilder()
.setMethodDescriptor(testIamPermissionsMethodDescriptor)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("resource", String.valueOf(request.getResource()));
return builder.build();
})
.build();
this.listAddressGroupsCallable =
callableFactory.createUnaryCallable(
listAddressGroupsTransportSettings,
settings.listAddressGroupsSettings(),
clientContext);
this.listAddressGroupsPagedCallable =
callableFactory.createPagedCallable(
listAddressGroupsTransportSettings,
settings.listAddressGroupsSettings(),
clientContext);
this.getAddressGroupCallable =
callableFactory.createUnaryCallable(
getAddressGroupTransportSettings, settings.getAddressGroupSettings(), clientContext);
this.createAddressGroupCallable =
callableFactory.createUnaryCallable(
createAddressGroupTransportSettings,
settings.createAddressGroupSettings(),
clientContext);
this.createAddressGroupOperationCallable =
callableFactory.createOperationCallable(
createAddressGroupTransportSettings,
settings.createAddressGroupOperationSettings(),
clientContext,
operationsStub);
this.updateAddressGroupCallable =
callableFactory.createUnaryCallable(
updateAddressGroupTransportSettings,
settings.updateAddressGroupSettings(),
clientContext);
this.updateAddressGroupOperationCallable =
callableFactory.createOperationCallable(
updateAddressGroupTransportSettings,
settings.updateAddressGroupOperationSettings(),
clientContext,
operationsStub);
this.addAddressGroupItemsCallable =
callableFactory.createUnaryCallable(
addAddressGroupItemsTransportSettings,
settings.addAddressGroupItemsSettings(),
clientContext);
this.addAddressGroupItemsOperationCallable =
callableFactory.createOperationCallable(
addAddressGroupItemsTransportSettings,
settings.addAddressGroupItemsOperationSettings(),
clientContext,
operationsStub);
this.removeAddressGroupItemsCallable =
callableFactory.createUnaryCallable(
removeAddressGroupItemsTransportSettings,
settings.removeAddressGroupItemsSettings(),
clientContext);
this.removeAddressGroupItemsOperationCallable =
callableFactory.createOperationCallable(
removeAddressGroupItemsTransportSettings,
settings.removeAddressGroupItemsOperationSettings(),
clientContext,
operationsStub);
this.cloneAddressGroupItemsCallable =
callableFactory.createUnaryCallable(
cloneAddressGroupItemsTransportSettings,
settings.cloneAddressGroupItemsSettings(),
clientContext);
this.cloneAddressGroupItemsOperationCallable =
callableFactory.createOperationCallable(
cloneAddressGroupItemsTransportSettings,
settings.cloneAddressGroupItemsOperationSettings(),
clientContext,
operationsStub);
this.deleteAddressGroupCallable =
callableFactory.createUnaryCallable(
deleteAddressGroupTransportSettings,
settings.deleteAddressGroupSettings(),
clientContext);
this.deleteAddressGroupOperationCallable =
callableFactory.createOperationCallable(
deleteAddressGroupTransportSettings,
settings.deleteAddressGroupOperationSettings(),
clientContext,
operationsStub);
this.listAddressGroupReferencesCallable =
callableFactory.createUnaryCallable(
listAddressGroupReferencesTransportSettings,
settings.listAddressGroupReferencesSettings(),
clientContext);
this.listAddressGroupReferencesPagedCallable =
callableFactory.createPagedCallable(
listAddressGroupReferencesTransportSettings,
settings.listAddressGroupReferencesSettings(),
clientContext);
this.listLocationsCallable =
callableFactory.createUnaryCallable(
listLocationsTransportSettings, settings.listLocationsSettings(), clientContext);
this.listLocationsPagedCallable =
callableFactory.createPagedCallable(
listLocationsTransportSettings, settings.listLocationsSettings(), clientContext);
this.getLocationCallable =
callableFactory.createUnaryCallable(
getLocationTransportSettings, settings.getLocationSettings(), clientContext);
this.setIamPolicyCallable =
callableFactory.createUnaryCallable(
setIamPolicyTransportSettings, settings.setIamPolicySettings(), clientContext);
this.getIamPolicyCallable =
callableFactory.createUnaryCallable(
getIamPolicyTransportSettings, settings.getIamPolicySettings(), clientContext);
this.testIamPermissionsCallable =
callableFactory.createUnaryCallable(
testIamPermissionsTransportSettings,
settings.testIamPermissionsSettings(),
clientContext);
this.backgroundResources =
new BackgroundResourceAggregation(clientContext.getBackgroundResources());
}
public GrpcOperationsStub getOperationsStub() {
return operationsStub;
}
@Override
public UnaryCallable<ListAddressGroupsRequest, ListAddressGroupsResponse>
listAddressGroupsCallable() {
return listAddressGroupsCallable;
}
@Override
public UnaryCallable<ListAddressGroupsRequest, ListAddressGroupsPagedResponse>
listAddressGroupsPagedCallable() {
return listAddressGroupsPagedCallable;
}
@Override
public UnaryCallable<GetAddressGroupRequest, AddressGroup> getAddressGroupCallable() {
return getAddressGroupCallable;
}
@Override
public UnaryCallable<CreateAddressGroupRequest, Operation> createAddressGroupCallable() {
return createAddressGroupCallable;
}
@Override
public OperationCallable<CreateAddressGroupRequest, AddressGroup, OperationMetadata>
createAddressGroupOperationCallable() {
return createAddressGroupOperationCallable;
}
@Override
public UnaryCallable<UpdateAddressGroupRequest, Operation> updateAddressGroupCallable() {
return updateAddressGroupCallable;
}
@Override
public OperationCallable<UpdateAddressGroupRequest, AddressGroup, OperationMetadata>
updateAddressGroupOperationCallable() {
return updateAddressGroupOperationCallable;
}
@Override
public UnaryCallable<AddAddressGroupItemsRequest, Operation> addAddressGroupItemsCallable() {
return addAddressGroupItemsCallable;
}
@Override
public OperationCallable<AddAddressGroupItemsRequest, AddressGroup, OperationMetadata>
addAddressGroupItemsOperationCallable() {
return addAddressGroupItemsOperationCallable;
}
@Override
public UnaryCallable<RemoveAddressGroupItemsRequest, Operation>
removeAddressGroupItemsCallable() {
return removeAddressGroupItemsCallable;
}
@Override
public OperationCallable<RemoveAddressGroupItemsRequest, AddressGroup, OperationMetadata>
removeAddressGroupItemsOperationCallable() {
return removeAddressGroupItemsOperationCallable;
}
@Override
public UnaryCallable<CloneAddressGroupItemsRequest, Operation> cloneAddressGroupItemsCallable() {
return cloneAddressGroupItemsCallable;
}
@Override
public OperationCallable<CloneAddressGroupItemsRequest, AddressGroup, OperationMetadata>
cloneAddressGroupItemsOperationCallable() {
return cloneAddressGroupItemsOperationCallable;
}
@Override
public UnaryCallable<DeleteAddressGroupRequest, Operation> deleteAddressGroupCallable() {
return deleteAddressGroupCallable;
}
@Override
public OperationCallable<DeleteAddressGroupRequest, Empty, OperationMetadata>
deleteAddressGroupOperationCallable() {
return deleteAddressGroupOperationCallable;
}
@Override
public UnaryCallable<ListAddressGroupReferencesRequest, ListAddressGroupReferencesResponse>
listAddressGroupReferencesCallable() {
return listAddressGroupReferencesCallable;
}
@Override
public UnaryCallable<ListAddressGroupReferencesRequest, ListAddressGroupReferencesPagedResponse>
listAddressGroupReferencesPagedCallable() {
return listAddressGroupReferencesPagedCallable;
}
@Override
public UnaryCallable<ListLocationsRequest, ListLocationsResponse> listLocationsCallable() {
return listLocationsCallable;
}
@Override
public UnaryCallable<ListLocationsRequest, ListLocationsPagedResponse>
listLocationsPagedCallable() {
return listLocationsPagedCallable;
}
@Override
public UnaryCallable<GetLocationRequest, Location> getLocationCallable() {
return getLocationCallable;
}
@Override
public UnaryCallable<SetIamPolicyRequest, Policy> setIamPolicyCallable() {
return setIamPolicyCallable;
}
@Override
public UnaryCallable<GetIamPolicyRequest, Policy> getIamPolicyCallable() {
return getIamPolicyCallable;
}
@Override
public UnaryCallable<TestIamPermissionsRequest, TestIamPermissionsResponse>
testIamPermissionsCallable() {
return testIamPermissionsCallable;
}
@Override
public final void close() {
try {
backgroundResources.close();
} catch (RuntimeException e) {
throw e;
} catch (Exception e) {
throw new IllegalStateException("Failed to close resource", e);
}
}
@Override
public void shutdown() {
backgroundResources.shutdown();
}
@Override
public boolean isShutdown() {
return backgroundResources.isShutdown();
}
@Override
public boolean isTerminated() {
  // Reflects the termination state of the shared background resources.
  return backgroundResources.isTerminated();
}
@Override
public void shutdownNow() {
  // Delegates immediate shutdown to the shared background resources.
  backgroundResources.shutdownNow();
}
@Override
public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
  // Blocks until the background resources terminate or the timeout elapses.
  return backgroundResources.awaitTermination(duration, unit);
}
}
|
apache/poi | 34,856 | poi-ooxml/src/test/java/org/apache/poi/xssf/usermodel/TestXSSFFormulaParser.java | /* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.xssf.usermodel;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.util.Arrays;
import org.apache.poi.hssf.HSSFTestDataSamples;
import org.apache.poi.hssf.usermodel.HSSFEvaluationWorkbook;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.apache.poi.ss.formula.FormulaParseException;
import org.apache.poi.ss.formula.FormulaParser;
import org.apache.poi.ss.formula.FormulaParsingWorkbook;
import org.apache.poi.ss.formula.FormulaRenderingWorkbook;
import org.apache.poi.ss.formula.FormulaType;
import org.apache.poi.ss.formula.WorkbookDependentFormula;
import org.apache.poi.ss.formula.ptg.Area3DPtg;
import org.apache.poi.ss.formula.ptg.Area3DPxg;
import org.apache.poi.ss.formula.ptg.AreaPtg;
import org.apache.poi.ss.formula.ptg.AttrPtg;
import org.apache.poi.ss.formula.ptg.ErrPtg;
import org.apache.poi.ss.formula.ptg.FuncPtg;
import org.apache.poi.ss.formula.ptg.FuncVarPtg;
import org.apache.poi.ss.formula.ptg.IntPtg;
import org.apache.poi.ss.formula.ptg.IntersectionPtg;
import org.apache.poi.ss.formula.ptg.MemAreaPtg;
import org.apache.poi.ss.formula.ptg.MemFuncPtg;
import org.apache.poi.ss.formula.ptg.NamePtg;
import org.apache.poi.ss.formula.ptg.NameXPxg;
import org.apache.poi.ss.formula.ptg.ParenthesisPtg;
import org.apache.poi.ss.formula.ptg.Ptg;
import org.apache.poi.ss.formula.ptg.Ref3DPtg;
import org.apache.poi.ss.formula.ptg.Ref3DPxg;
import org.apache.poi.ss.formula.ptg.RefPtg;
import org.apache.poi.ss.formula.ptg.StringPtg;
import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.CellType;
import org.apache.poi.ss.usermodel.FormulaEvaluator;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.ss.usermodel.Workbook;
import org.apache.poi.ss.util.CellReference;
import org.apache.poi.xssf.XSSFTestDataSamples;
import org.junit.jupiter.api.Test;
public final class TestXSSFFormulaParser {
/** Parses {@code fmla} as a cell formula with no sheet context (sheet index -1). */
private static Ptg[] parse(FormulaParsingWorkbook fpb, String fmla) {
    return FormulaParser.parse(fmla, fpb, FormulaType.CELL, -1);
}
/**
 * Parses {@code fmla} as a cell formula, additionally supplying the row index
 * used to resolve row-relative structured references such as [#This Row].
 */
private static Ptg[] parse(FormulaParsingWorkbook fpb, String fmla, int rowIndex) {
    return FormulaParser.parse(fmla, fpb, FormulaType.CELL, -1, rowIndex);
}
@Test
void basicParsing() throws IOException {
    // try-with-resources: the workbook is released even when an assertion
    // fails (the original only closed it on the success path), matching the
    // style already used by test58648Single in this file.
    try (XSSFWorkbook wb = new XSSFWorkbook()) {
        XSSFEvaluationWorkbook fpb = XSSFEvaluationWorkbook.create(wb);
        Ptg[] ptgs;

        // Plain cell references of increasing size all parse to a single RefPtg
        ptgs = parse(fpb, "ABC10");
        assertEquals(1, ptgs.length);
        assertTrue(ptgs[0] instanceof RefPtg, "Had " + Arrays.toString(ptgs));

        ptgs = parse(fpb, "A500000");
        assertEquals(1, ptgs.length);
        assertTrue(ptgs[0] instanceof RefPtg, "Had " + Arrays.toString(ptgs));

        ptgs = parse(fpb, "ABC500000");
        assertEquals(1, ptgs.length);
        assertTrue(ptgs[0] instanceof RefPtg, "Had " + Arrays.toString(ptgs));

        // highest allowed rows and column (XFD and 0x100000)
        ptgs = parse(fpb, "XFD1048576");
        assertEquals(1, ptgs.length);
        assertTrue(ptgs[0] instanceof RefPtg, "Had " + Arrays.toString(ptgs));

        // Out-of-range references are not valid cell references, so the parser
        // treats them as (non-existent) named ranges and fails accordingly.
        // column greater than XFD
        FormulaParseException e;
        e = assertThrows(FormulaParseException.class, () -> parse(fpb, "XFE10"));
        assertEquals("Specified named range 'XFE10' does not exist in the current workbook.", e.getMessage());

        // row greater than 0x100000
        e = assertThrows(FormulaParseException.class, () -> parse(fpb, "XFD1048577"));
        assertEquals("Specified named range 'XFD1048577' does not exist in the current workbook.", e.getMessage());

        // Formula referencing one cell
        ptgs = parse(fpb, "ISEVEN(A1)");
        assertEquals(3, ptgs.length);
        assertEquals(NameXPxg.class, ptgs[0].getClass());
        assertEquals(RefPtg.class, ptgs[1].getClass());
        assertEquals(FuncVarPtg.class, ptgs[2].getClass());
        assertEquals("ISEVEN", ptgs[0].toFormulaString());
        assertEquals("A1", ptgs[1].toFormulaString());
        assertEquals("#external#", ptgs[2].toFormulaString());

        // Formula referencing an area
        ptgs = parse(fpb, "SUM(A1:B3)");
        assertEquals(2, ptgs.length);
        assertEquals(AreaPtg.class, ptgs[0].getClass());
        assertEquals(AttrPtg.class, ptgs[1].getClass());
        assertEquals("A1:B3", ptgs[0].toFormulaString());
        assertEquals("SUM", ptgs[1].toFormulaString());

        // Formula referencing one cell in a different sheet
        ptgs = parse(fpb, "SUM(Sheet1!A1)");
        assertEquals(2, ptgs.length);
        assertEquals(Ref3DPxg.class, ptgs[0].getClass());
        assertEquals(AttrPtg.class, ptgs[1].getClass());
        assertEquals("Sheet1!A1", ptgs[0].toFormulaString());
        assertEquals("SUM", ptgs[1].toFormulaString());

        // Formula referencing an area in a different sheet
        ptgs = parse(fpb, "SUM(Sheet1!A1:B3)");
        assertEquals(2, ptgs.length);
        assertEquals(Area3DPxg.class, ptgs[0].getClass());
        assertEquals(AttrPtg.class, ptgs[1].getClass());
        assertEquals("Sheet1!A1:B3", ptgs[0].toFormulaString());
        assertEquals("SUM", ptgs[1].toFormulaString());
    }
}
@Test
void builtInFormulas() throws IOException {
    // try-with-resources: the workbook is released even when an assertion
    // fails (the original only closed it on the success path).
    try (XSSFWorkbook wb = new XSSFWorkbook()) {
        XSSFEvaluationWorkbook fpb = XSSFEvaluationWorkbook.create(wb);
        Ptg[] ptgs;

        // "LOG10" alone is a valid cell reference (column LOG, row 10),
        // so it must parse as a RefPtg, not as the built-in function
        ptgs = parse(fpb, "LOG10");
        assertEquals(1, ptgs.length);
        assertTrue(ptgs[0] instanceof RefPtg);

        // With an argument list it parses as the LOG10 built-in function
        ptgs = parse(fpb, "LOG10(100)");
        assertEquals(2, ptgs.length);
        assertTrue(ptgs[0] instanceof IntPtg);
        assertTrue(ptgs[1] instanceof FuncPtg);
    }
}
@Test
void formulaReferencesSameWorkbook() throws IOException {
    // Use a test file with "other workbook" style references to itself.
    // try-with-resources guarantees the workbook is closed even when an
    // assertion fails (the original leaked it on failure).
    try (XSSFWorkbook wb = XSSFTestDataSamples.openSampleWorkbook("56737.xlsx")) {
        XSSFEvaluationWorkbook fpb = XSSFEvaluationWorkbook.create(wb);

        // Reference to a named range in our own workbook, as if it
        // were defined in a different workbook; external workbook
        // number 0 designates "this" workbook.
        Ptg[] ptgs = parse(fpb, "[0]!NR_Global_B2");
        assertEquals(1, ptgs.length);
        assertEquals(NameXPxg.class, ptgs[0].getClass());
        NameXPxg pxg = (NameXPxg) ptgs[0];
        assertEquals(0, pxg.getExternalWorkbookNumber());
        assertNull(pxg.getSheetName());
        assertEquals("NR_Global_B2", pxg.getNameName());
        assertEquals("[0]!NR_Global_B2", pxg.toFormulaString());
    }
}
@Test
void formulaReferencesOtherSheets() throws IOException {
    // Use a test file with the named ranges in place; try-with-resources
    // closes it even when an assertion fails (the original leaked it).
    try (XSSFWorkbook wb = XSSFTestDataSamples.openSampleWorkbook("56737.xlsx")) {
        XSSFEvaluationWorkbook fpb = XSSFEvaluationWorkbook.create(wb);
        Ptg[] ptgs;

        // Reference to a single cell in a different sheet
        ptgs = parse(fpb, "Uses!A1");
        assertEquals(1, ptgs.length);
        assertEquals(Ref3DPxg.class, ptgs[0].getClass());
        assertEquals(-1, ((Ref3DPxg)ptgs[0]).getExternalWorkbookNumber());
        assertEquals("A1", ((Ref3DPxg)ptgs[0]).format2DRefAsString());
        assertEquals("Uses!A1", ptgs[0].toFormulaString());

        // Reference to a single cell in a different sheet, which needs quoting
        ptgs = parse(fpb, "'Testing 47100'!A1");
        assertEquals(1, ptgs.length);
        assertEquals(Ref3DPxg.class, ptgs[0].getClass());
        assertEquals(-1, ((Ref3DPxg)ptgs[0]).getExternalWorkbookNumber());
        assertEquals("Testing 47100", ((Ref3DPxg)ptgs[0]).getSheetName());
        assertEquals("A1", ((Ref3DPxg)ptgs[0]).format2DRefAsString());
        assertEquals("'Testing 47100'!A1", ptgs[0].toFormulaString());

        // Reference to a sheet scoped named range from another sheet
        ptgs = parse(fpb, "Defines!NR_To_A1");
        assertEquals(1, ptgs.length);
        assertEquals(NameXPxg.class, ptgs[0].getClass());
        assertEquals(-1, ((NameXPxg)ptgs[0]).getExternalWorkbookNumber());
        assertEquals("Defines", ((NameXPxg)ptgs[0]).getSheetName());
        assertEquals("NR_To_A1", ((NameXPxg)ptgs[0]).getNameName());
        assertEquals("Defines!NR_To_A1", ptgs[0].toFormulaString());

        // Reference to a workbook scoped named range
        ptgs = parse(fpb, "NR_Global_B2");
        assertEquals(1, ptgs.length);
        assertEquals(NamePtg.class, ptgs[0].getClass());
        assertEquals("NR_Global_B2", ((NamePtg)ptgs[0]).toFormulaString(fpb));
    }
}
@Test
void formulaReferencesOtherWorkbook() throws IOException {
    // Use a test file with the external linked table in place;
    // try-with-resources closes it even when an assertion fails.
    try (XSSFWorkbook wb = XSSFTestDataSamples.openSampleWorkbook("ref-56737.xlsx")) {
        XSSFEvaluationWorkbook fpb = XSSFEvaluationWorkbook.create(wb);
        Ptg[] ptgs;

        // Reference to a single cell in a different workbook (external book #1)
        ptgs = parse(fpb, "[1]Uses!$A$1");
        assertEquals(1, ptgs.length);
        assertEquals(Ref3DPxg.class, ptgs[0].getClass());
        assertEquals(1, ((Ref3DPxg)ptgs[0]).getExternalWorkbookNumber());
        assertEquals("Uses", ((Ref3DPxg)ptgs[0]).getSheetName());
        assertEquals("$A$1", ((Ref3DPxg)ptgs[0]).format2DRefAsString());
        assertEquals("[1]Uses!$A$1", ptgs[0].toFormulaString());

        // Reference to a sheet-scoped named range in a different workbook
        ptgs = parse(fpb, "[1]Defines!NR_To_A1");
        assertEquals(1, ptgs.length);
        assertEquals(NameXPxg.class, ptgs[0].getClass());
        assertEquals(1, ((NameXPxg)ptgs[0]).getExternalWorkbookNumber());
        assertEquals("Defines", ((NameXPxg)ptgs[0]).getSheetName());
        assertEquals("NR_To_A1", ((NameXPxg)ptgs[0]).getNameName());
        assertEquals("[1]Defines!NR_To_A1", ptgs[0].toFormulaString());

        // Reference to a global named range in a different workbook
        ptgs = parse(fpb, "[1]!NR_Global_B2");
        assertEquals(1, ptgs.length);
        assertEquals(NameXPxg.class, ptgs[0].getClass());
        assertEquals(1, ((NameXPxg)ptgs[0]).getExternalWorkbookNumber());
        assertNull(((NameXPxg) ptgs[0]).getSheetName());
        assertEquals("NR_Global_B2", ((NameXPxg)ptgs[0]).getNameName());
        assertEquals("[1]!NR_Global_B2", ptgs[0].toFormulaString());
    }
}
/**
 * A handful of functions (such as SUM, COUNTA, MIN) support
 * multi-sheet references (eg Sheet1:Sheet3!A1 = Cell A1 from
 * Sheets 1 through Sheet 3) and multi-sheet area references
 * (eg Sheet1:Sheet3!A1:B2 = Cells A1 through B2 from Sheets
 * 1 through Sheet 3).
 * This test, based on common test files for HSSF and XSSF, checks
 * that we can read and parse these kinds of references
 * (but not evaluate - that's elsewhere in the test suite)
 */
@Test
void multiSheetReferencesHSSFandXSSF() throws IOException {
    // NOTE(review): both workbooks are opened up front and closed at the end
    // of each loop iteration; if an assertion fails, the remaining workbook(s)
    // leak. Consider try-with-resources per workbook - confirm before changing.
    Workbook[] wbs = new Workbook[] {
        HSSFTestDataSamples.openSampleWorkbook("55906-MultiSheetRefs.xls"),
        XSSFTestDataSamples.openSampleWorkbook("55906-MultiSheetRefs.xlsx")
    };
    for (Workbook wb : wbs) {
        Sheet s1 = wb.getSheetAt(0);
        Ptg[] ptgs;

        // Check the contents: the sample files contain pre-built multi-sheet formulas
        Cell sumF = s1.getRow(2).getCell(0);
        assertNotNull(sumF);
        assertEquals("SUM(Sheet1:Sheet3!A1)", sumF.getCellFormula());

        Cell avgF = s1.getRow(2).getCell(1);
        assertNotNull(avgF);
        assertEquals("AVERAGE(Sheet1:Sheet3!A1)", avgF.getCellFormula());

        Cell countAF = s1.getRow(2).getCell(2);
        assertNotNull(countAF);
        assertEquals("COUNTA(Sheet1:Sheet3!C1)", countAF.getCellFormula());

        Cell maxF = s1.getRow(4).getCell(1);
        assertNotNull(maxF);
        assertEquals("MAX(Sheet1:Sheet3!A$1)", maxF.getCellFormula());

        Cell sumFA = s1.getRow(2).getCell(7);
        assertNotNull(sumFA);
        assertEquals("SUM(Sheet1:Sheet3!A1:B2)", sumFA.getCellFormula());

        Cell avgFA = s1.getRow(2).getCell(8);
        assertNotNull(avgFA);
        assertEquals("AVERAGE(Sheet1:Sheet3!A1:B2)", avgFA.getCellFormula());

        Cell maxFA = s1.getRow(4).getCell(8);
        assertNotNull(maxFA);
        assertEquals("MAX(Sheet1:Sheet3!A$1:B$2)", maxFA.getCellFormula());

        Cell countFA = s1.getRow(5).getCell(8);
        assertNotNull(countFA);
        assertEquals("COUNT(Sheet1:Sheet3!$A$1:$B$2)", countFA.getCellFormula());

        // Create a formula parser appropriate for the workbook format
        final FormulaParsingWorkbook fpb;
        if (wb instanceof HSSFWorkbook)
            fpb = HSSFEvaluationWorkbook.create((HSSFWorkbook)wb);
        else
            fpb = XSSFEvaluationWorkbook.create((XSSFWorkbook)wb);

        // Check things parse as expected:
        // HSSF produces *Ptg tokens, XSSF produces the *Pxg equivalents.

        // SUM to one cell over 3 workbooks, relative reference
        ptgs = parse(fpb, "SUM(Sheet1:Sheet3!A1)");
        assertEquals(2, ptgs.length);
        if (wb instanceof HSSFWorkbook) {
            assertEquals(Ref3DPtg.class, ptgs[0].getClass());
        } else {
            assertEquals(Ref3DPxg.class, ptgs[0].getClass());
        }
        assertEquals("Sheet1:Sheet3!A1", toFormulaString(ptgs[0], fpb));
        assertEquals(AttrPtg.class, ptgs[1].getClass());
        assertEquals("SUM", toFormulaString(ptgs[1], fpb));

        // MAX to one cell over 3 workbooks, absolute row reference
        ptgs = parse(fpb, "MAX(Sheet1:Sheet3!A$1)");
        assertEquals(2, ptgs.length);
        if (wb instanceof HSSFWorkbook) {
            assertEquals(Ref3DPtg.class, ptgs[0].getClass());
        } else {
            assertEquals(Ref3DPxg.class, ptgs[0].getClass());
        }
        assertEquals("Sheet1:Sheet3!A$1", toFormulaString(ptgs[0], fpb));
        assertEquals(FuncVarPtg.class, ptgs[1].getClass());
        assertEquals("MAX", toFormulaString(ptgs[1], fpb));

        // MIN to one cell over 3 workbooks, absolute reference
        ptgs = parse(fpb, "MIN(Sheet1:Sheet3!$A$1)");
        assertEquals(2, ptgs.length);
        if (wb instanceof HSSFWorkbook) {
            assertEquals(Ref3DPtg.class, ptgs[0].getClass());
        } else {
            assertEquals(Ref3DPxg.class, ptgs[0].getClass());
        }
        assertEquals("Sheet1:Sheet3!$A$1", toFormulaString(ptgs[0], fpb));
        assertEquals(FuncVarPtg.class, ptgs[1].getClass());
        assertEquals("MIN", toFormulaString(ptgs[1], fpb));

        // SUM to a range of cells over 3 workbooks
        ptgs = parse(fpb, "SUM(Sheet1:Sheet3!A1:B2)");
        assertEquals(2, ptgs.length);
        if (wb instanceof HSSFWorkbook) {
            assertEquals(Area3DPtg.class, ptgs[0].getClass());
        } else {
            assertEquals(Area3DPxg.class, ptgs[0].getClass());
        }
        assertEquals("Sheet1:Sheet3!A1:B2", toFormulaString(ptgs[0], fpb));
        assertEquals(AttrPtg.class, ptgs[1].getClass());
        assertEquals("SUM", toFormulaString(ptgs[1], fpb));

        // MIN to a range of cells over 3 workbooks, absolute reference
        ptgs = parse(fpb, "MIN(Sheet1:Sheet3!$A$1:$B$2)");
        assertEquals(2, ptgs.length);
        if (wb instanceof HSSFWorkbook) {
            assertEquals(Area3DPtg.class, ptgs[0].getClass());
        } else {
            assertEquals(Area3DPxg.class, ptgs[0].getClass());
        }
        assertEquals("Sheet1:Sheet3!$A$1:$B$2", toFormulaString(ptgs[0], fpb));
        assertEquals(FuncVarPtg.class, ptgs[1].getClass());
        assertEquals("MIN", toFormulaString(ptgs[1], fpb));

        // Check we can round-trip - try to set a new one to a new single cell
        Cell newF = s1.getRow(0).createCell(10, CellType.FORMULA);
        newF.setCellFormula("SUM(Sheet2:Sheet3!A1)");
        assertEquals("SUM(Sheet2:Sheet3!A1)", newF.getCellFormula());

        // Check we can round-trip - try to set a new one to a cell range
        newF = s1.getRow(0).createCell(11, CellType.FORMULA);
        newF.setCellFormula("MIN(Sheet1:Sheet2!A1:B2)");
        assertEquals("MIN(Sheet1:Sheet2!A1:B2)", newF.getCellFormula());

        wb.close();
    }
}
@Test
void testQuotedSheetNamesReference() throws IOException {
    // quoted sheet names bug fix
    // see TestHSSFFormulaEvaluator equivalent which behaves a little differently
    try (XSSFWorkbook wb = new XSSFWorkbook()) {
        // Four sheets: plain names, one with a space, one with a special character
        Sheet sheet1 = wb.createSheet("Sheet1");
        Sheet sheet2 = wb.createSheet("Sheet2");
        Sheet sheet3 = wb.createSheet("Sheet 3");
        Sheet sheet4 = wb.createSheet("Sheet4>");

        // Seed each sheet's first row with two numeric cells
        Row tempRow = sheet1.createRow(0);
        tempRow.createCell(0).setCellValue(1);
        tempRow.createCell(1).setCellValue(2);

        tempRow = sheet2.createRow(0);
        tempRow.createCell(0).setCellValue(3);
        tempRow.createCell(1).setCellValue(4);

        tempRow = sheet3.createRow(0);
        tempRow.createCell(0).setCellValue(5);
        tempRow.createCell(1).setCellValue(6);

        tempRow = sheet4.createRow(0);
        tempRow.createCell(0).setCellValue(5);
        tempRow.createCell(1).setCellValue(6);

        Cell cell = tempRow.createCell(2);

        // unquoted sheet names round-trip unchanged
        String formula = "SUM(Sheet1:Sheet2!A1:B1)";
        cell.setCellFormula(formula);
        String cellFormula = cell.getCellFormula();
        assertEquals(formula, cellFormula);

        // quoted sheet names with no space: quoting is preserved on read-back
        cell = tempRow.createCell(3);
        formula = "SUM('Sheet1:Sheet2'!A1:B1)";
        cell.setCellFormula(formula);
        cellFormula = cell.getCellFormula();
        assertEquals("SUM('Sheet1:Sheet2'!A1:B1)", cellFormula);

        // quoted sheet names with space
        cell = tempRow.createCell(4);
        formula = "SUM('Sheet1:Sheet 3'!A1:B1)";
        cell.setCellFormula(formula);
        cellFormula = cell.getCellFormula();
        assertEquals(formula, cellFormula);

        // quoted sheet names with special character
        cell = tempRow.createCell(5);
        formula = "SUM('Sheet1:Sheet4>'!A1:B1)";
        cell.setCellFormula(formula);
        cellFormula = cell.getCellFormula();
        assertEquals(formula, cellFormula);

        // quoted sheet names with special character #2
        // NOTE(review): this case is intentionally left disabled; presumably it
        // does not yet round-trip - confirm before enabling.
        // cell = tempRow.createCell(6);
        // formula = "SUM('Sheet 3:Sheet4>'!A1:B1)";
        // cell.setCellFormula(formula);
        // cellFormula = cell.getCellFormula();
        // assertEquals(formula, cellFormula);
    }
}
/**
 * Renders a single parse token back to formula text, supplying the workbook
 * context when the token is workbook-dependent (3D / external references).
 */
private static String toFormulaString(Ptg ptg, FormulaParsingWorkbook wb) {
    return (ptg instanceof WorkbookDependentFormula)
            ? ((WorkbookDependentFormula) ptg).toFormulaString((FormulaRenderingWorkbook) wb)
            : ptg.toFormulaString();
}
@Test
void test58648Single() throws IOException {
    try (XSSFWorkbook wb = new XSSFWorkbook()) {
        XSSFEvaluationWorkbook fpb = XSSFEvaluationWorkbook.create(wb);

        // A trailing space inside the parentheses must not break parsing:
        // the result is still RefPtg followed by ParenthesisPtg
        Ptg[] ptgs = parse(fpb, "(ABC10 )");
        assertEquals(2, ptgs.length, "Had: " + Arrays.toString(ptgs));
        assertTrue(ptgs[0] instanceof RefPtg, "Had " + Arrays.toString(ptgs));
        assertTrue(ptgs[1] instanceof ParenthesisPtg, "Had " + Arrays.toString(ptgs));
    }
}
@Test
void test58648Basic() throws IOException {
    // try-with-resources (consistent with test58648Single): the workbook is
    // released even when an assertion fails.
    try (XSSFWorkbook wb = new XSSFWorkbook()) {
        XSSFEvaluationWorkbook fpb = XSSFEvaluationWorkbook.create(wb);
        Ptg[] ptgs;

        // verify whitespaces in different places around a parenthesised
        // reference: the result is always RefPtg followed by ParenthesisPtg
        ptgs = parse(fpb, "(ABC10)");
        assertEquals(2, ptgs.length);
        assertTrue(ptgs[0] instanceof RefPtg);
        assertTrue(ptgs[1] instanceof ParenthesisPtg);

        ptgs = parse(fpb, "( ABC10)");
        assertEquals(2, ptgs.length);
        assertTrue(ptgs[0] instanceof RefPtg);
        assertTrue(ptgs[1] instanceof ParenthesisPtg);

        ptgs = parse(fpb, "(ABC10 )");
        assertEquals(2, ptgs.length);
        assertTrue(ptgs[0] instanceof RefPtg);
        assertTrue(ptgs[1] instanceof ParenthesisPtg);

        // nested parentheses contribute one ParenthesisPtg per level
        ptgs = parse(fpb, "((ABC10))");
        assertEquals(3, ptgs.length);
        assertTrue(ptgs[0] instanceof RefPtg);
        assertTrue(ptgs[1] instanceof ParenthesisPtg);
        assertTrue(ptgs[2] instanceof ParenthesisPtg);

        ptgs = parse(fpb, "((ABC10) )");
        assertEquals(3, ptgs.length);
        assertTrue(ptgs[0] instanceof RefPtg);
        assertTrue(ptgs[1] instanceof ParenthesisPtg);
        assertTrue(ptgs[2] instanceof ParenthesisPtg);

        ptgs = parse(fpb, "( (ABC10))");
        assertEquals(3, ptgs.length);
        assertTrue(ptgs[0] instanceof RefPtg);
        assertTrue(ptgs[1] instanceof ParenthesisPtg);
        assertTrue(ptgs[2] instanceof ParenthesisPtg);
    }
}
@Test
void test58648FormulaParsing() throws IOException {
    // try-with-resources: the workbook is closed even when evaluation or an
    // assertion fails (the original leaked it on failure).
    try (Workbook wb = XSSFTestDataSamples.openSampleWorkbook("58648.xlsx")) {
        FormulaEvaluator evaluator = wb.getCreationHelper().createFormulaEvaluator();

        // Evaluate every formula cell in the workbook; wrap any failure with
        // the cell's address so the offending formula can be located.
        for (int i = 0; i < wb.getNumberOfSheets(); i++) {
            Sheet xsheet = wb.getSheetAt(i);
            for (Row row : xsheet) {
                for (Cell cell : row) {
                    if (cell.getCellType() == CellType.FORMULA) {
                        try {
                            evaluator.evaluateFormulaCell(cell);
                        } catch (Exception e) {
                            CellReference cellRef = new CellReference(cell.getRowIndex(), cell.getColumnIndex());
                            throw new RuntimeException("error at: " + cellRef, e);
                        }
                    }
                }
            }
        }

        // Spot-check one formula cell: it must have cached a numeric result of 5
        Sheet sheet = wb.getSheet("my-sheet");
        Cell cell = sheet.getRow(1).getCell(4);
        assertEquals(CellType.FORMULA, cell.getCellType(),
            "Had: " + cell);
        assertEquals(CellType.NUMERIC, cell.getCachedFormulaResultType(),
            "Had: " + cell + " and " + cell.getCachedFormulaResultType());
        assertEquals(5d, cell.getNumericCellValue(), 0d);
    }
}
@Test
void testWhitespaceInFormula() throws IOException {
    // try-with-resources: the workbook is released even when an assertion fails.
    try (XSSFWorkbook wb = new XSSFWorkbook()) {
        XSSFEvaluationWorkbook fpb = XSSFEvaluationWorkbook.create(wb);
        Ptg[] ptgs;

        // verify whitespaces in different places
        ptgs = parse(fpb, "INTERCEPT(A2:A5, B2:B5)");
        assertEquals(3, ptgs.length);
        assertTrue(ptgs[0] instanceof AreaPtg);
        assertTrue(ptgs[1] instanceof AreaPtg);
        assertTrue(ptgs[2] instanceof FuncPtg);

        // mixed spaces, tabs and newlines must not change the parse result
        ptgs = parse(fpb, " INTERCEPT ( \t \r A2 : \nA5 , B2 : B5 ) \t");
        assertEquals(3, ptgs.length);
        assertTrue(ptgs[0] instanceof AreaPtg);
        assertTrue(ptgs[1] instanceof AreaPtg);
        assertTrue(ptgs[2] instanceof FuncPtg);

        ptgs = parse(fpb, "(VLOOKUP(\"item1\", A2:B3, 2, FALSE) - VLOOKUP(\"item2\", A2:B3, 2, FALSE) )");
        assertEquals(12, ptgs.length);
        assertTrue(ptgs[0] instanceof StringPtg);
        assertTrue(ptgs[1] instanceof AreaPtg);
        assertTrue(ptgs[2] instanceof IntPtg);

        // a space between two areas is the intersection operator
        ptgs = parse(fpb, "A1:B1 B1:B2");
        assertEquals(4, ptgs.length);
        assertTrue(ptgs[0] instanceof MemAreaPtg);
        assertTrue(ptgs[1] instanceof AreaPtg);
        assertTrue(ptgs[2] instanceof AreaPtg);
        assertTrue(ptgs[3] instanceof IntersectionPtg);

        // NOTE(review): this repeats the formula above verbatim - presumably the
        // second variant was meant to use different whitespace; confirm intent.
        ptgs = parse(fpb, "A1:B1 B1:B2");
        assertEquals(4, ptgs.length);
        assertTrue(ptgs[0] instanceof MemAreaPtg);
        assertTrue(ptgs[1] instanceof AreaPtg);
        assertTrue(ptgs[2] instanceof AreaPtg);
        assertTrue(ptgs[3] instanceof IntersectionPtg);
    }
}
@Test
void testWhitespaceInComplexFormula() throws IOException {
    // try-with-resources: the workbook is released even when an assertion fails.
    try (XSSFWorkbook wb = new XSSFWorkbook()) {
        XSSFEvaluationWorkbook fpb = XSSFEvaluationWorkbook.create(wb);
        Ptg[] ptgs;

        // verify whitespaces in different places: the compact formula...
        ptgs = parse(fpb, "SUM(A1:INDEX(1:1048576,MAX(IFERROR(MATCH(99^99,B:B,1),0),IFERROR(MATCH(\"zzzz\",B:B,1),0)),MAX(IFERROR(MATCH(99^99,1:1,1),0),IFERROR(MATCH(\"zzzz\",1:1,1),0))))");
        assertEquals(40, ptgs.length);
        assertTrue(ptgs[0] instanceof MemFuncPtg);
        assertTrue(ptgs[1] instanceof RefPtg);
        assertTrue(ptgs[2] instanceof AreaPtg);
        assertTrue(ptgs[3] instanceof NameXPxg);

        // ...and the same formula padded with whitespace produce identical tokens
        ptgs = parse(fpb, "SUM ( A1 : INDEX( 1 : 1048576 , MAX( IFERROR ( MATCH ( 99 ^ 99 , B : B , 1 ) , 0 ) , IFERROR ( MATCH ( \"zzzz\" , B:B , 1 ) , 0 ) ) , MAX ( IFERROR ( MATCH ( 99 ^ 99 , 1 : 1 , 1 ) , 0 ) , IFERROR ( MATCH ( \"zzzz\" , 1 : 1 , 1 ) , 0 ) ) ) )");
        assertEquals(40, ptgs.length);
        assertTrue(ptgs[0] instanceof MemFuncPtg);
        assertTrue(ptgs[1] instanceof RefPtg);
        assertTrue(ptgs[2] instanceof AreaPtg);
        assertTrue(ptgs[3] instanceof NameXPxg);
    }
}
@Test
void parseStructuredReferences() throws IOException {
    // try-with-resources: the workbook is released even when an assertion
    // fails (the original only closed it on the success path).
    try (XSSFWorkbook wb = XSSFTestDataSamples.openSampleWorkbook("StructuredReferences.xlsx")) {
        XSSFEvaluationWorkbook fpb = XSSFEvaluationWorkbook.create(wb);
        Ptg[] ptgs;

        /*
        The following cases are tested (copied from FormulaParser.parseStructuredReference)
           1 Table1[col]
           2 Table1[[#Totals],[col]]
           3 Table1[#Totals]
           4 Table1[#All]
           5 Table1[#Data]
           6 Table1[#Headers]
           7 Table1[#Totals]
           8 Table1[#This Row]
           9 Table1[[#All],[col]]
          10 Table1[[#Headers],[col]]
          11 Table1[[#Totals],[col]]
          12 Table1[[#All],[col1]:[col2]]
          13 Table1[[#Data],[col1]:[col2]]
          14 Table1[[#Headers],[col1]:[col2]]
          15 Table1[[#Totals],[col1]:[col2]]
          16 Table1[[#Headers],[#Data],[col2]]
          17 Table1[[#This Row], [col1]]
          18 Table1[ [col1]:[col2] ]
          19 Table1[]
        */

        final String tbl = "\\_Prime.1";
        final String noTotalsRowReason = ": Tables without a Totals row should return #REF! on [#Totals]";

        ////// Case 1: Evaluate Table1[col] with apostrophe-escaped #-signs ////////
        ptgs = parse(fpb, "SUM("+tbl+"[calc='#*'#])");
        assertEquals(2, ptgs.length);

        // Area3DPxg [sheet=Table ! A2:A7]
        assertTrue(ptgs[0] instanceof Area3DPxg);
        Area3DPxg ptg0 = (Area3DPxg) ptgs[0];
        assertEquals("Table", ptg0.getSheetName());
        assertEquals("A2:A7", ptg0.format2DRefAsString());
        // Note: structured references are evaluated and resolved to regular 3D area references.
        assertEquals("Table!A2:A7", ptg0.toFormulaString());

        // AttrPtg [sum ]
        assertTrue(ptgs[1] instanceof AttrPtg);
        AttrPtg ptg1 = (AttrPtg) ptgs[1];
        assertTrue(ptg1.isSum());

        ////// Case 1: Evaluate "Table1[col]" ////////
        ptgs = parse(fpb, tbl+"[Name]");
        assertEquals(1, ptgs.length);
        assertEquals("Table!B2:B7", ptgs[0].toFormulaString(), "Table1[col]");

        ////// Case 2: Evaluate "Table1[[#Totals],[col]]" ////////
        ptgs = parse(fpb, tbl+"[[#Totals],[col]]");
        assertEquals(1, ptgs.length);
        assertEquals(ErrPtg.REF_INVALID, ptgs[0], "Table1[[#Totals],[col]]" + noTotalsRowReason);

        ////// Case 3: Evaluate "Table1[#Totals]" ////////
        ptgs = parse(fpb, tbl+"[#Totals]");
        assertEquals(1, ptgs.length);
        assertEquals(ErrPtg.REF_INVALID, ptgs[0], "Table1[#Totals]" + noTotalsRowReason);

        ////// Case 4: Evaluate "Table1[#All]" ////////
        ptgs = parse(fpb, tbl+"[#All]");
        assertEquals(1, ptgs.length);
        assertEquals("Table!A1:C7", ptgs[0].toFormulaString(), "Table1[#All]");

        ////// Case 5: Evaluate "Table1[#Data]" (excludes Header and Total rows) ////////
        ptgs = parse(fpb, tbl+"[#Data]");
        assertEquals(1, ptgs.length);
        assertEquals("Table!A2:C7", ptgs[0].toFormulaString(), "Table1[#Data]");

        ////// Case 6: Evaluate "Table1[#Headers]" ////////
        ptgs = parse(fpb, tbl+"[#Headers]");
        assertEquals(1, ptgs.length);
        assertEquals("Table!A1:C1", ptgs[0].toFormulaString(), "Table1[#Headers]");

        ////// Case 7: Evaluate "Table1[#Totals]" ////////
        ptgs = parse(fpb, tbl+"[#Totals]");
        assertEquals(1, ptgs.length);
        assertEquals(ErrPtg.REF_INVALID, ptgs[0], "Table1[#Totals]" + noTotalsRowReason);

        ////// Case 8: Evaluate "Table1[#This Row]" ////////
        ptgs = parse(fpb, tbl+"[#This Row]", 2);
        assertEquals(1, ptgs.length);
        assertEquals("Table!A3:C3", ptgs[0].toFormulaString(), "Table1[#This Row]");

        ////// Evaluate "Table1[@]" (equivalent to "Table1[#This Row]") ////////
        ptgs = parse(fpb, tbl+"[@]", 2);
        assertEquals(1, ptgs.length);
        assertEquals("Table!A3:C3", ptgs[0].toFormulaString());

        ////// Evaluate "Table1[#This Row]" when rowIndex is outside Table ////////
        ptgs = parse(fpb, tbl+"[#This Row]", 10);
        assertEquals(1, ptgs.length);
        assertEquals(ErrPtg.VALUE_INVALID, ptgs[0], "Table1[#This Row]");

        ////// Evaluate "Table1[@]" when rowIndex is outside Table ////////
        ptgs = parse(fpb, tbl+"[@]", 10);
        assertEquals(1, ptgs.length);
        assertEquals(ErrPtg.VALUE_INVALID, ptgs[0], "Table1[@]");

        ////// Evaluate "Table1[[#Data],[col]]" ////////
        ptgs = parse(fpb, tbl+"[[#Data], [Number]]");
        assertEquals(1, ptgs.length);
        assertEquals("Table!C2:C7", ptgs[0].toFormulaString(), "Table1[[#Data],[col]]");

        ////// Case 9: Evaluate "Table1[[#All],[col]]" ////////
        ptgs = parse(fpb, tbl+"[[#All], [Number]]");
        assertEquals(1, ptgs.length);
        assertEquals("Table!C1:C7", ptgs[0].toFormulaString(), "Table1[[#All],[col]]");

        ////// Case 10: Evaluate "Table1[[#Headers],[col]]" ////////
        ptgs = parse(fpb, tbl+"[[#Headers], [Number]]");
        assertEquals(1, ptgs.length);
        // also acceptable: Table1!B1
        assertEquals("Table!C1:C1", ptgs[0].toFormulaString(), "Table1[[#Headers],[col]]");

        ////// Case 11: Evaluate "Table1[[#Totals],[col]]" ////////
        ptgs = parse(fpb, tbl+"[[#Totals],[Name]]");
        assertEquals(1, ptgs.length);
        assertEquals(ErrPtg.REF_INVALID, ptgs[0], "Table1[[#Totals],[col]]" + noTotalsRowReason);

        ////// Case 12: Evaluate "Table1[[#All],[col1]:[col2]]" ////////
        ptgs = parse(fpb, tbl+"[[#All], [Name]:[Number]]");
        assertEquals(1, ptgs.length);
        assertEquals("Table!B1:C7", ptgs[0].toFormulaString(), "Table1[[#All],[col1]:[col2]]");

        ////// Case 13: Evaluate "Table1[[#Data],[col]:[col2]]" ////////
        ptgs = parse(fpb, tbl+"[[#Data], [Name]:[Number]]");
        assertEquals(1, ptgs.length);
        assertEquals("Table!B2:C7", ptgs[0].toFormulaString(), "Table1[[#Data],[col]:[col2]]");

        ////// Case 14: Evaluate "Table1[[#Headers],[col1]:[col2]]" ////////
        ptgs = parse(fpb, tbl+"[[#Headers], [Name]:[Number]]");
        assertEquals(1, ptgs.length);
        assertEquals("Table!B1:C1", ptgs[0].toFormulaString(), "Table1[[#Headers],[col1]:[col2]]");

        ////// Case 15: Evaluate "Table1[[#Totals],[col]:[col2]]" ////////
        ptgs = parse(fpb, tbl+"[[#Totals], [Name]:[Number]]");
        assertEquals(1, ptgs.length);
        assertEquals(ErrPtg.REF_INVALID, ptgs[0], "Table1[[#Totals],[col]:[col2]]" + noTotalsRowReason);

        ////// Case 16: Evaluate "Table1[[#Headers],[#Data],[col]]" ////////
        ptgs = parse(fpb, tbl+"[[#Headers],[#Data],[Number]]");
        assertEquals(1, ptgs.length);
        assertEquals("Table!C1:C7", ptgs[0].toFormulaString(), "Table1[[#Headers],[#Data],[col]]");

        ////// Case 17: Evaluate "Table1[[#This Row], [col1]]" ////////
        ptgs = parse(fpb, tbl+"[[#This Row], [Number]]", 2);
        assertEquals(1, ptgs.length);
        // also acceptable: Table!C3
        assertEquals("Table!C3:C3", ptgs[0].toFormulaString(), "Table1[[#This Row], [col1]]");

        ////// Case 18: Evaluate "Table1[[col]:[col2]]" ////////
        ptgs = parse(fpb, tbl+"[[Name]:[Number]]");
        assertEquals(1, ptgs.length);
        assertEquals("Table!B2:C7", ptgs[0].toFormulaString(), "Table1[[col]:[col2]]");

        ////// Case 19: Evaluate "Table1[]" ////////
        // Excludes Header and Total rows, equivalent to Table1[#Data] (see case 5).
        // This is the only case where [] is allowed.
        ptgs = parse(fpb, tbl+"[]");
        assertEquals(1, ptgs.length);
        assertEquals("Table!A2:C7", ptgs[0].toFormulaString(), "Table1[]");
    }
}
}
|
apache/usergrid | 34,797 | stack/corepersistence/graph/src/test/java/org/apache/usergrid/persistence/graph/serialization/EdgeSerializationTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.usergrid.persistence.graph.serialization;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.UUID;
import org.apache.usergrid.StressTest;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.usergrid.persistence.core.guice.MigrationManagerRule;
import org.apache.usergrid.persistence.core.scope.ApplicationScope;
import org.apache.usergrid.persistence.core.util.IdGenerator;
import org.apache.usergrid.persistence.graph.Edge;
import org.apache.usergrid.persistence.graph.GraphFig;
import org.apache.usergrid.persistence.graph.MarkedEdge;
import org.apache.usergrid.persistence.graph.SearchByEdge;
import org.apache.usergrid.persistence.model.entity.Id;
import org.apache.usergrid.persistence.model.util.UUIDGenerator;
import com.fasterxml.uuid.UUIDComparator;
import com.google.inject.Inject;
import com.netflix.astyanax.Keyspace;
import com.netflix.astyanax.MutationBatch;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;
import com.netflix.astyanax.connectionpool.exceptions.NotFoundException;
import com.netflix.astyanax.model.Column;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.model.ConsistencyLevel;
import com.netflix.astyanax.serializers.StringSerializer;
import static org.apache.usergrid.persistence.graph.test.util.EdgeTestUtils.createEdge;
import static org.apache.usergrid.persistence.graph.test.util.EdgeTestUtils.createGetByEdge;
import static org.apache.usergrid.persistence.core.util.IdGenerator.createId;
import static org.apache.usergrid.persistence.graph.test.util.EdgeTestUtils.createMarkedEdge;
import static org.apache.usergrid.persistence.graph.test.util.EdgeTestUtils.createSearchByEdge;
import static org.apache.usergrid.persistence.graph.test.util.EdgeTestUtils.createSearchByEdgeAndId;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Abstract contract test for {@link EdgeSerialization} implementations.
 * Subclasses supply the concrete serialization under test via
 * {@link #getSerialization()}; every implementation must pass the same
 * read/write/delete/mark semantics exercised here.
 */
public abstract class EdgeSerializationTest {
private static final Logger logger = LoggerFactory.getLogger( EdgeSerializationTest.class );
@Inject
@Rule
public MigrationManagerRule migrationManagerRule;
protected EdgeSerialization serialization;
@Inject
protected GraphFig graphFig;
@Inject
protected Keyspace keyspace;
protected ApplicationScope scope;
@Before
public void setup() {
    // Build a mocked application id that reports the "organization" type
    // and a fresh time uuid, then hang it off a mocked scope.
    final Id applicationId = mock( Id.class );
    when( applicationId.getType() ).thenReturn( "organization" );
    when( applicationId.getUuid() ).thenReturn( UUIDGenerator.newTimeUUID() );

    scope = mock( ApplicationScope.class );
    when( scope.getApplication() ).thenReturn( applicationId );

    // The concrete serialization under test is supplied by the subclass.
    serialization = getSerialization();
}
/**
 * Returns the {@link EdgeSerialization} implementation under test.
 * Invoked once per test from {@link #setup()}; subclasses supply the
 * concrete serialization strategy to exercise.
 */
protected abstract EdgeSerialization getSerialization();
/**
 * Tests mixing 2 edge types between 2 nodes. We should get results for the same
 * source->destination with the 2 edge types, each returned only by a query for
 * its own type.
 */
@Test
public void mixedEdgeTypes() throws ConnectionException {
    final MarkedEdge edge1 = createEdge( "source", "edge1", "target" );
    final Id sourceId = edge1.getSourceNode();
    final Id targetId = edge1.getTargetNode();
    // second edge shares both nodes but uses a different edge type
    final MarkedEdge edge2 = createEdge( sourceId, "edge2", targetId );
    UUID timestamp = UUIDGenerator.newTimeUUID();
    serialization.writeEdge( scope, edge1, timestamp ).execute();
    serialization.writeEdge( scope, edge2, timestamp ).execute();
    long now = System.currentTimeMillis();
    //get our edges out by name; each type-scoped query must return only its own edge
    Iterator<MarkedEdge> results =
            serialization.getEdgesFromSource( scope, createSearchByEdge( sourceId, "edge1", now, null ) );
    assertEquals( edge1, results.next() );
    assertFalse( results.hasNext() );
    //test getting the next edge
    results = serialization.getEdgesFromSource( scope, createSearchByEdge( sourceId, "edge2", now, null ) );
    assertEquals( edge2, results.next() );
    assertFalse( results.hasNext() );
    //test getting source edges from the target (reverse index)
    results = serialization.getEdgesToTarget( scope, createSearchByEdge( targetId, "edge1", now, null ) );
    assertEquals( edge1, results.next() );
    assertFalse( results.hasNext() );
    results = serialization.getEdgesToTarget( scope, createSearchByEdge( targetId, "edge2", now, null ) );
    assertEquals( edge2, results.next() );
    assertFalse( results.hasNext() );
}
/**
 * Test paging by resuming the search from the edge: passing a previously seen
 * edge as the "last" parameter must return only strictly older edges.
 */
@Test
public void testPaging() throws ConnectionException {
    final MarkedEdge edge1 = createEdge( "source", "edge", "target", 0 );
    final Id sourceId = edge1.getSourceNode();
    final Id targetId = edge1.getTargetNode();
    // same nodes and type, later timestamp (1) so edge2 sorts before edge1
    final MarkedEdge edge2 = createEdge( sourceId, "edge", targetId, 1 );
    serialization.writeEdge( scope, edge1, UUIDGenerator.newTimeUUID() ).execute();
    serialization.writeEdge( scope, edge2, UUIDGenerator.newTimeUUID() ).execute();
    long now = System.currentTimeMillis();
    //get our edges out by name, resuming after edge2: only edge1 remains
    Iterator<MarkedEdge> results =
            serialization.getEdgesFromSource( scope, createSearchByEdge( sourceId, "edge", now, edge2 ) );
    assertEquals( edge1, results.next() );
    assertFalse( results.hasNext() );
    //resuming after edge1 (the oldest) must be empty
    results = serialization.getEdgesFromSource( scope, createSearchByEdge( sourceId, "edge", now, edge1 ) );
    assertFalse( "No results should be returned", results.hasNext() );
    //same resume semantics on the target-side index
    results = serialization.getEdgesToTarget( scope, createSearchByEdge( targetId, "edge", now, edge2 ) );
    assertEquals( edge1, results.next() );
    assertFalse( results.hasNext() );
    results = serialization.getEdgesToTarget( scope, createSearchByEdge( targetId, "edge", now, edge1 ) );
    assertFalse( "No results should be returned", results.hasNext() );
}
/**
 * Tests fetching the versions of a single directed edge. Versions must come
 * back newest first, edges with other types or targets must be excluded, and
 * a maximum-timestamp bound must be honored.
 */
@Test
public void directEdgeGets() throws ConnectionException {
    long timestamp = 1000;
    final MarkedEdge edgev1 = createEdge( "source", "edge1", "target", timestamp );
    final Id sourceId = edgev1.getSourceNode();
    final Id targetId = edgev1.getTargetNode();
    // second version of the same edge, one tick later
    final MarkedEdge edgev2 = createEdge( sourceId, "edge1", targetId, timestamp + 1 );
    //we shouldn't get this one back
    final MarkedEdge diffTarget = createEdge( sourceId, "edge1", IdGenerator.createId( "newTarget" ) );
    assertTrue( "Edge version 1 has lower time uuid",
            Long.compare( edgev1.getTimestamp(), edgev2.getTimestamp() ) < 0 );
    //create edge type 2 to ensure we don't get it in results
    final MarkedEdge edgeType2V1 = createEdge( sourceId, "edge2", targetId );
    serialization.writeEdge( scope, edgev1, UUIDGenerator.newTimeUUID() ).execute();
    serialization.writeEdge( scope, edgev2, UUIDGenerator.newTimeUUID() ).execute();
    serialization.writeEdge( scope, edgeType2V1, UUIDGenerator.newTimeUUID() ).execute();
    serialization.writeEdge( scope, diffTarget, UUIDGenerator.newTimeUUID() ).execute();
    final long now = System.currentTimeMillis();
    SearchByEdge search = createGetByEdge( sourceId, "edge1", targetId, now, null );
    Iterator<MarkedEdge> results = serialization.getEdgeVersions( scope, search );
    // newest version first
    assertEquals( edgev2, results.next() );
    assertEquals( edgev1, results.next() );
    assertFalse( "No results should be returned", results.hasNext() );
    //max version test: bounding by edgev1's timestamp must exclude edgev2
    search = createGetByEdge( sourceId, "edge1", targetId, edgev1.getTimestamp(), null );
    results = serialization.getEdgeVersions( scope, search );
    assertEquals( edgev1, results.next() );
    assertFalse( "Max version was honored", results.hasNext() );
}
/**
 * Tests mixing 2 target id types from one source node. Queries scoped by
 * target/source id type must return only the edge whose node has that type.
 */
@Test
public void mixedIdTypes() throws ConnectionException {
    final MarkedEdge edge1 = createEdge( "source", "edge", "target" );
    final Id sourceId = edge1.getSourceNode();
    final Id targetId1 = edge1.getTargetNode();
    // same source and edge type, but a target node of a different id type
    final MarkedEdge edge2 = createEdge( sourceId, "edge", IdGenerator.createId( "target2" ) );
    final Id targetId2 = edge2.getTargetNode();
    final UUID timestamp = UUIDGenerator.newTimeUUID();
    serialization.writeEdge( scope, edge1, timestamp ).execute();
    serialization.writeEdge( scope, edge2, timestamp ).execute();
    long now = System.currentTimeMillis();
    //get our edges out by name, scoped to the first target's id type
    Iterator<MarkedEdge> results = serialization.getEdgesFromSourceByTargetType( scope,
            createSearchByEdgeAndId( sourceId, "edge", now, targetId1.getType(), null ) );
    assertEquals( edge1, results.next() );
    assertFalse( results.hasNext() );
    //scoped to the second target's id type
    results = serialization.getEdgesFromSourceByTargetType( scope,
            createSearchByEdgeAndId( sourceId, "edge", now, targetId2.getType(), null ) );
    assertEquals( edge2, results.next() );
    assertFalse( results.hasNext() );
    //test getting source edges from the target, scoped by the source id type
    results = serialization.getEdgesToTargetBySourceType( scope,
            createSearchByEdgeAndId( targetId1, "edge", now, sourceId.getType(), null ) );
    assertEquals( edge1, results.next() );
    assertFalse( results.hasNext() );
    results = serialization.getEdgesToTargetBySourceType( scope,
            createSearchByEdgeAndId( targetId2, "edge", now, sourceId.getType(), null ) );
    assertEquals( edge2, results.next() );
    assertFalse( results.hasNext() );
}
/**
 * Test paging by resuming the search from the edge on the id-type-scoped
 * query paths.
 */
@Test
public void idTypesPaging() throws ConnectionException {
    final long timestamp = 1000;
    final MarkedEdge edge1 = createEdge( "source", "edge", "target", timestamp );
    final Id sourceId = edge1.getSourceNode();
    final Id targetId1 = edge1.getTargetNode();
    // same source, edge type, and target id type; one tick later so it sorts first
    final MarkedEdge edge2 = createEdge( sourceId, "edge", IdGenerator.createId( "target" ), timestamp + 1 );
    final Id targetId2 = edge2.getTargetNode();
    serialization.writeEdge( scope, edge1, UUIDGenerator.newTimeUUID() ).execute();
    serialization.writeEdge( scope, edge2, UUIDGenerator.newTimeUUID() ).execute();
    long now = System.currentTimeMillis();
    //get our edges out by name: both share the target id type, newest first
    Iterator<MarkedEdge> results = serialization.getEdgesFromSourceByTargetType( scope,
            createSearchByEdgeAndId( sourceId, "edge", now, targetId1.getType(), null ) );
    assertEquals( edge2, results.next() );
    assertEquals( edge1, results.next() );
    assertFalse( results.hasNext() );
    //resume after edge2: only edge1 remains
    results = serialization.getEdgesFromSourceByTargetType( scope,
            createSearchByEdgeAndId( sourceId, "edge", now, targetId1.getType(), edge2 ) );
    assertEquals( edge1, results.next() );
    assertFalse( results.hasNext() );
    results = serialization.getEdgesFromSourceByTargetType( scope,
            createSearchByEdgeAndId( sourceId, "edge", now, targetId1.getType(), edge1 ) );
    assertFalse( results.hasNext() );
    //test getting source edges from the target with resume
    results = serialization.getEdgesToTargetBySourceType( scope,
            createSearchByEdgeAndId( targetId1, "edge", now, sourceId.getType(), edge2 ) );
    assertTrue( results.hasNext() );
    assertEquals(edge1, results.next());
    assertFalse(results.hasNext());
    results = serialization.getEdgesToTargetBySourceType( scope,
            createSearchByEdgeAndId( targetId2, "edge", now, sourceId.getType(), edge1 ) );
    assertFalse( results.hasNext() );
}
/**
 * Tests deleting edges. Two edges (written with an identical logical timestamp
 * on purpose) are first verified on every read path, then deleted; afterwards
 * all of the same queries must return no results.
 */
@Test
public void delete() throws ConnectionException {
    //we purposefully use the same timestamp
    final long timestamp = 1000L;
    final MarkedEdge edge1 = createEdge( "source", "edge", "target", timestamp );
    final Id sourceId = edge1.getSourceNode();
    final Id targetId1 = edge1.getTargetNode();
    final MarkedEdge edge2 = createEdge( sourceId, "edge", IdGenerator.createId( "target" ), timestamp );
    final Id targetId2 = edge2.getTargetNode();
    serialization.writeEdge( scope, edge1, UUIDGenerator.newTimeUUID() ).execute();
    serialization.writeEdge( scope, edge2, UUIDGenerator.newTimeUUID() ).execute();
    long now = System.currentTimeMillis();
    //get our edges out by name
    Iterator<MarkedEdge> results = serialization.getEdgesFromSourceByTargetType( scope,
            createSearchByEdgeAndId( sourceId, "edge", now, targetId1.getType(), null ) );
    assertEquals( edge2, results.next() );
    assertEquals( edge1, results.next() );
    assertFalse( results.hasNext() );
    //get them out by type
    results = serialization.getEdgesFromSource( scope, createSearchByEdge( sourceId, "edge", now, null ) );
    assertEquals( edge2, results.next() );
    assertEquals( edge1, results.next() );
    assertFalse( results.hasNext() );
    //validate we get from target, scoped by source id type
    results = serialization.getEdgesToTargetBySourceType( scope,
            createSearchByEdgeAndId( targetId1, "edge", now, sourceId.getType(), null ) );
    assertEquals( edge1, results.next() );
    assertFalse( results.hasNext() );
    results = serialization.getEdgesToTargetBySourceType( scope,
            createSearchByEdgeAndId( targetId2, "edge", now, sourceId.getType(), null ) );
    assertEquals( edge2, results.next() );
    assertFalse( results.hasNext() );
    //validate we get from target by type
    results = serialization.getEdgesToTarget( scope, createSearchByEdge( targetId1, "edge", now, null ) );
    assertEquals( edge1, results.next() );
    assertFalse( results.hasNext() );
    results = serialization.getEdgesToTarget( scope, createSearchByEdge( targetId2, "edge", now, null ) );
    assertEquals( edge2, results.next() );
    assertFalse( results.hasNext() );
    //now we've validated everything exists, lets blitz the data and ensure it's removed
    final UUID timestamp2 = UUIDGenerator.newTimeUUID();
    serialization.deleteEdge( scope, edge1, timestamp2 ).execute();
    serialization.deleteEdge( scope, edge2, timestamp2 ).execute();
    //now we should get nothing for the same queries
    results = serialization.getEdgesFromSourceByTargetType( scope,
            createSearchByEdgeAndId( sourceId, "edge", now, targetId1.getType(), null ) );
    assertFalse( results.hasNext() );
    results = serialization.getEdgesFromSource( scope, createSearchByEdge( sourceId, "edge", now, null ) );
    assertFalse( results.hasNext() );
    results = serialization.getEdgesToTargetBySourceType( scope,
            createSearchByEdgeAndId( targetId1, "edge", now, sourceId.getType(), null ) );
    assertFalse( results.hasNext() );
    results = serialization.getEdgesToTargetBySourceType( scope,
            createSearchByEdgeAndId( targetId2, "edge", now, sourceId.getType(), null ) );
    assertFalse( results.hasNext() );
    results = serialization.getEdgesToTarget( scope, createSearchByEdge( targetId1, "edge", now, null ) );
    assertFalse( results.hasNext() );
    results = serialization.getEdgesToTarget( scope, createSearchByEdge( targetId2, "edge", now, null ) );
    assertFalse( results.hasNext() );
}
/**
 * Tests marking edges as deleted. After writing marked (deleted) versions of two
 * existing edges, every read path must still return them, but each returned edge
 * must report {@code isDeleted() == true}.
 */
@Test
public void mark() throws ConnectionException {
    final long timestamp = 1000L;
    final MarkedEdge edge1 = createEdge( "source", "edge", "target", timestamp );
    final Id sourceId = edge1.getSourceNode();
    final Id targetId1 = edge1.getTargetNode();
    final MarkedEdge edge2 = createEdge( sourceId, "edge", IdGenerator.createId( "target" ), timestamp + 1 );
    final Id targetId2 = edge2.getTargetNode();
    serialization.writeEdge( scope, edge1, UUIDGenerator.newTimeUUID() ).execute();
    serialization.writeEdge( scope, edge2, UUIDGenerator.newTimeUUID() ).execute();
    long now = System.currentTimeMillis();
    //get our edges out by name
    Iterator<MarkedEdge> results = serialization.getEdgesFromSourceByTargetType( scope,
            createSearchByEdgeAndId( sourceId, "edge", now, targetId1.getType(), null ) );
    assertEquals( edge2, results.next() );
    assertEquals( edge1, results.next() );
    assertFalse( results.hasNext() );
    //get them out by type
    results = serialization.getEdgesFromSource( scope, createSearchByEdge( sourceId, "edge", now, null ) );
    assertEquals( edge2, results.next() );
    assertEquals( edge1, results.next() );
    assertFalse( results.hasNext() );
    //validate we get from target, scoped by source id type
    results = serialization.getEdgesToTargetBySourceType( scope,
            createSearchByEdgeAndId( targetId1, "edge", now, sourceId.getType(), null ) );
    assertEquals( edge1, results.next() );
    assertFalse( results.hasNext() );
    results = serialization.getEdgesToTargetBySourceType( scope,
            createSearchByEdgeAndId( targetId2, "edge", now, sourceId.getType(), null ) );
    assertEquals( edge2, results.next() );
    assertFalse( results.hasNext() );
    //validate we get from target by type
    results = serialization.getEdgesToTarget( scope, createSearchByEdge( targetId1, "edge", now, null ) );
    assertEquals( edge1, results.next() );
    assertFalse( results.hasNext() );
    results = serialization.getEdgesToTarget( scope, createSearchByEdge( targetId2, "edge", now, null ) );
    assertEquals( edge2, results.next() );
    assertFalse( results.hasNext() );
    //everything exists; now rewrite the same edges, flagged as deleted
    final MarkedEdge mark1 =
            createEdge( edge1.getSourceNode(), edge1.getType(), edge1.getTargetNode(), edge1.getTimestamp(), true );
    final MarkedEdge mark2 =
            createEdge( edge2.getSourceNode(), edge2.getType(), edge2.getTargetNode(), edge2.getTimestamp(), true );
    final UUID timestamp2 = UUIDGenerator.newTimeUUID();
    serialization.writeEdge( scope, mark1, timestamp2 ).execute();
    serialization.writeEdge( scope, mark2, timestamp2 ).execute();
    //every read path must now yield the marked versions
    results = serialization.getEdgesFromSourceByTargetType( scope,
            createSearchByEdgeAndId( sourceId, "edge", now, targetId1.getType(), null ) );
    MarkedEdge edge = results.next();
    assertEquals( mark2, edge );
    assertTrue( edge.isDeleted() );
    edge = results.next();
    assertEquals( mark1, edge );
    assertTrue( edge.isDeleted() );
    assertFalse( results.hasNext() );
    //get them out by type
    results = serialization.getEdgesFromSource( scope, createSearchByEdge( sourceId, "edge", now, null ) );
    edge = results.next();
    assertEquals( mark2, edge );
    assertTrue( edge.isDeleted() );
    edge = results.next();
    assertEquals( mark1, edge );
    assertTrue( edge.isDeleted() );
    assertFalse( results.hasNext() );
    //validate we get from target, scoped by source id type
    results = serialization.getEdgesToTargetBySourceType( scope,
            createSearchByEdgeAndId( targetId1, "edge", now, sourceId.getType(), null ) );
    edge = results.next();
    assertEquals( mark1, edge );
    assertTrue( edge.isDeleted() );
    assertFalse( results.hasNext() );
    results = serialization.getEdgesToTargetBySourceType( scope,
            createSearchByEdgeAndId( targetId2, "edge", now, sourceId.getType(), null ) );
    edge = results.next();
    assertEquals( mark2, edge );
    assertTrue( edge.isDeleted() );
    assertFalse( results.hasNext() );
    //validate we get from target by type
    results = serialization.getEdgesToTarget( scope, createSearchByEdge( targetId1, "edge", now, null ) );
    edge = results.next();
    assertEquals( mark1, edge );
    assertTrue( edge.isDeleted() );
    assertFalse( results.hasNext() );
    results = serialization.getEdgesToTarget( scope, createSearchByEdge( targetId2, "edge", now, null ) );
    edge = results.next();
    assertEquals( mark2, edge );
    assertTrue( edge.isDeleted() );
    assertFalse( results.hasNext() );
}
/**
 * Tests iterating a result set larger than a single scan page: writes two pages
 * worth of edges from one source and verifies each written edge comes back
 * exactly once.
 */
@Test
@Category(StressTest.class)
public void pageIteration() throws ConnectionException {
    int size = graphFig.getScanPageSize() * 2;
    final Id sourceId = IdGenerator.createId( "source" );
    final String type = "edge";
    Set<Edge> edges = new HashSet<>( size );
    long timestamp = 0;
    for ( int i = 0; i < size; i++ ) {
        final MarkedEdge edge = createEdge( sourceId, type, IdGenerator.createId( "target" ), timestamp );
        serialization.writeEdge( scope, edge, UUIDGenerator.newTimeUUID() ).execute();
        edges.add( edge );
        timestamp++;
    }
    //iterate everything back; each successful removal proves the edge was
    //returned, and returned only once
    Iterator<MarkedEdge> results =
            serialization.getEdgesFromSource( scope, createSearchByEdge( sourceId, type, timestamp, null ) );
    for ( MarkedEdge edge : new IterableWrapper<>( results ) ) {
        assertTrue( "Removed edge from write set", edges.remove( edge ) );
    }
    assertEquals( "All edges were returned", 0, edges.size() );
}
/**
 * Tests that iterators page correctly across the scan page size: writes three
 * pages worth of versions of a single edge in one mutation batch and verifies
 * every read path returns the full count.
 */
@Test
@Category(StressTest.class)
public void testIteratorPaging() throws ConnectionException {
    final Id sourceId = IdGenerator.createId( "source" );
    final String edgeType = "edge";
    final Id targetId = IdGenerator.createId( "target" );
    int writeCount = graphFig.getScanPageSize() * 3;
    final MutationBatch batch = keyspace.prepareMutationBatch();
    long timestamp = 10000L;
    for ( int i = 0; i < writeCount; i++ ) {
        final MarkedEdge edge = createEdge( sourceId, edgeType, targetId, timestamp );
        batch.mergeShallow( serialization.writeEdge( scope, edge, UUIDGenerator.newTimeUUID() ) );
        //increment timestamp (not done inline on purpose) If we do System.currentMillis we get the same edge on
        // fast systems
        timestamp++;
    }
    logger.info( "Flushing edges" );
    batch.execute();
    //versions of the single edge
    Iterator<MarkedEdge> results = serialization
            .getEdgeVersions( scope, createGetByEdge( sourceId, edgeType, targetId, timestamp, null ) );
    verify( results, writeCount );
    //get them all from source
    results = serialization.getEdgesFromSource( scope, createSearchByEdge( sourceId, edgeType, timestamp, null ) );
    verify( results, writeCount );
    results = serialization.getEdgesFromSourceByTargetType( scope,
            createSearchByEdgeAndId( sourceId, edgeType, timestamp, targetId.getType(), null ) );
    verify( results, writeCount );
    results = serialization.getEdgesToTarget( scope, createSearchByEdge( targetId, edgeType, timestamp, null ) );
    verify( results, writeCount );
    results = serialization.getEdgesToTargetBySourceType( scope,
            createSearchByEdgeAndId( targetId, edgeType, timestamp, sourceId.getType(), null ) );
    verify( results, writeCount );
}
/**
 * Tests writing 2 edges quickly in succession, then returning them (source-side
 * reads). Was failing for commitlog impl. Also verifies that a delete issued
 * with a LOWER mutation uuid than the write does not take effect, while a
 * delete with the same uuid does.
 */
@Test
public void successiveWriteReturnSource() throws ConnectionException {
    final MarkedEdge edge1 = createMarkedEdge( "source", "edge", "target" );
    final Id sourceId = edge1.getSourceNode();
    // three strictly increasing time uuids used as mutation timestamps
    final UUID timestamp1 = UUIDGenerator.newTimeUUID();
    final UUID timestamp2 = UUIDGenerator.newTimeUUID();
    final UUID timestamp3 = UUIDGenerator.newTimeUUID();
    assertTrue( UUIDComparator.staticCompare( timestamp1, timestamp2 ) < 0 );
    assertTrue( UUIDComparator.staticCompare( timestamp2, timestamp3 ) < 0 );
    //we purposefully write with timestamp2
    serialization.writeEdge( scope, edge1, timestamp2 ).execute();
    long now = System.currentTimeMillis();
    //get our edges out by name
    Iterator<MarkedEdge> results =
            serialization.getEdgesFromSource( scope, createSearchByEdge( sourceId, "edge", now, null ) );
    assertEquals( edge1, results.next() );
    assertFalse( results.hasNext() );
    Iterator<MarkedEdge> versions = serialization
            .getEdgeVersions( scope, createGetByEdge( sourceId, "edge", edge1.getTargetNode(), now, null ) );
    assertEquals( edge1, versions.next() );
    assertFalse( versions.hasNext() );
    //purposefully write with timestamp1 to ensure this doesn't take, it's a lower uuid
    serialization.deleteEdge( scope, edge1, timestamp1 ).execute();
    // edge must still be visible: the stale delete lost to the newer write
    results = serialization.getEdgesFromSource( scope, createSearchByEdge( sourceId, "edge", now, null ) );
    assertEquals( edge1, results.next() );
    assertFalse( results.hasNext() );
    versions = serialization
            .getEdgeVersions( scope, createGetByEdge( sourceId, "edge", edge1.getTargetNode(), now, null ) );
    assertEquals( edge1, versions.next() );
    assertFalse( versions.hasNext() );
    //should delete: same uuid as the original write
    serialization.deleteEdge( scope, edge1, timestamp2 ).execute();
    //get our edges out by name: nothing remains
    results = serialization.getEdgesFromSource( scope, createSearchByEdge( sourceId, "edge", now, null ) );
    assertFalse( results.hasNext() );
    versions = serialization
            .getEdgeVersions( scope, createGetByEdge( sourceId, "edge", edge1.getTargetNode(), now, null ) );
    assertFalse( versions.hasNext() );
    //write with v3, should exist again
    serialization.writeEdge( scope, edge1, timestamp3 ).execute();
    results = serialization.getEdgesFromSource( scope, createSearchByEdge( sourceId, "edge", now, null ) );
    assertEquals( edge1, results.next() );
    assertFalse( results.hasNext() );
    versions = serialization
            .getEdgeVersions( scope, createGetByEdge( sourceId, "edge", edge1.getTargetNode(), now, null ) );
    assertEquals( edge1, versions.next() );
    assertFalse( versions.hasNext() );
}
/**
 * Tests writing 2 edges quickly in succession, then returning them (target-side
 * reads). Was failing for commitlog impl. Mirror of
 * {@link #successiveWriteReturnSource()} using the to-target index.
 */
@Test
public void successiveWriteReturnTarget() throws ConnectionException {
    final MarkedEdge edge1 = createMarkedEdge( "source", "edge", "target" );
    final Id targetId = edge1.getTargetNode();
    // three strictly increasing time uuids used as mutation timestamps
    final UUID timestamp1 = UUIDGenerator.newTimeUUID();
    final UUID timestamp2 = UUIDGenerator.newTimeUUID();
    final UUID timestamp3 = UUIDGenerator.newTimeUUID();
    assertTrue( UUIDComparator.staticCompare( timestamp1, timestamp2 ) < 0 );
    assertTrue( UUIDComparator.staticCompare( timestamp2, timestamp3 ) < 0 );
    //we purposefully write with timestamp2
    serialization.writeEdge( scope, edge1, timestamp2 ).execute();
    long now = System.currentTimeMillis();
    //get our edges out by name
    Iterator<MarkedEdge> results =
            serialization.getEdgesToTarget( scope, createSearchByEdge( targetId, "edge", now, null ) );
    assertEquals( edge1, results.next() );
    assertFalse( results.hasNext() );
    Iterator<MarkedEdge> versions = serialization.getEdgeVersions( scope,
            createGetByEdge( edge1.getSourceNode(), "edge", edge1.getTargetNode(), now, null ) );
    assertEquals( edge1, versions.next() );
    assertFalse( versions.hasNext() );
    //purposefully write with timestamp1 to ensure this doesn't take, it's a lower uuid
    serialization.deleteEdge( scope, edge1, timestamp1 ).execute();
    // edge must still be visible: the stale delete lost to the newer write
    results = serialization.getEdgesToTarget( scope, createSearchByEdge( targetId, "edge", now, null ) );
    assertEquals( edge1, results.next() );
    assertFalse( results.hasNext() );
    versions = serialization.getEdgeVersions( scope,
            createGetByEdge( edge1.getSourceNode(), "edge", edge1.getTargetNode(), now, null ) );
    assertEquals( edge1, versions.next() );
    assertFalse( versions.hasNext() );
    //should delete: same uuid as the original write
    serialization.deleteEdge( scope, edge1, timestamp2 ).execute();
    //get our edges out by name: nothing remains
    results = serialization.getEdgesToTarget( scope, createSearchByEdge( targetId, "edge", now, null ) );
    assertFalse( results.hasNext() );
    versions = serialization.getEdgeVersions( scope,
            createGetByEdge( edge1.getSourceNode(), "edge", edge1.getTargetNode(), now, null ) );
    assertFalse( versions.hasNext() );
    //write with v3, should exist again
    serialization.writeEdge( scope, edge1, timestamp3 ).execute();
    results = serialization.getEdgesToTarget( scope, createSearchByEdge( targetId, "edge", now, null ) );
    assertEquals( edge1, results.next() );
    assertFalse( results.hasNext() );
    versions = serialization.getEdgeVersions( scope,
            createGetByEdge( edge1.getSourceNode(), "edge", edge1.getTargetNode(), now, null ) );
    assertEquals( edge1, versions.next() );
    assertFalse( versions.hasNext() );
}
/**
 * Documents Cassandra column timestamp semantics that the edge serialization
 * relies on: when a delete and a write carry the same client timestamp, the
 * delete wins — even for a write issued after the delete.
 */
@Test
public void testColumnTimestamps() throws ConnectionException {
    ColumnFamily<String, String> cf = new ColumnFamily<>( "test", StringSerializer.get(), StringSerializer.get() );
    if ( keyspace.describeKeyspace().getColumnFamily( "test" ) == null ) {
        keyspace.createColumnFamily( cf, new HashMap<String, Object>() );
    }
    final String rowKey = "test";
    // random suffix keeps runs from colliding in a shared keyspace
    final String colName = "colName" + UUID.randomUUID();
    final long timestamp = 100L;
    //write the column with an explicit timestamp and read it back
    MutationBatch batch = keyspace.prepareMutationBatch().withConsistencyLevel( ConsistencyLevel.CL_QUORUM )
            .setTimestamp( timestamp );
    batch.withRow( cf, rowKey ).putColumn( colName, true );
    batch.execute();
    Column<String> column = keyspace.prepareQuery( cf ).getKey( rowKey ).getColumn( colName ).execute().getResult();
    assertEquals( colName, column.getName() );
    assertTrue( column.getBooleanValue() );
    //now, delete with the same timestamp
    batch = keyspace.prepareMutationBatch().withConsistencyLevel( ConsistencyLevel.CL_QUORUM )
            .setTimestamp( timestamp );
    batch.withRow( cf, rowKey ).deleteColumn( colName );
    batch.execute();
    try {
        column = keyspace.prepareQuery( cf ).getKey( rowKey ).getColumn( colName ).execute().getResult();
        fail( "I shouldn't return a value" );
    }
    catch ( NotFoundException nfe ) {
        //expected: the tombstone at the same timestamp shadows the write
    }
    //now write it again at the same timestamp: the tombstone still wins
    batch = keyspace.prepareMutationBatch().withConsistencyLevel( ConsistencyLevel.CL_QUORUM )
            .setTimestamp( timestamp );
    batch.withRow( cf, rowKey ).putColumn( colName, true );
    batch.execute();
    try {
        column = keyspace.prepareQuery( cf ).getKey( rowKey ).getColumn( colName ).execute().getResult();
        fail( "I shouldn't return a value" );
    }
    catch ( NotFoundException nfe ) {
        //expected: still shadowed by the tombstone
    }
}
/**
 * Drains the iterator and asserts it produced exactly the expected number
 * of entries.
 */
private void verify( Iterator<MarkedEdge> results, int expectedCount ) {
    int seen = 0;
    for ( ; results.hasNext(); results.next() ) {
        seen++;
    }
    assertEquals( "All versions returned", expectedCount, seen );
}
private class IterableWrapper<T> implements Iterable<T> {
private final Iterator<T> source;
private IterableWrapper( final Iterator<T> source ) {this.source = source;}
@Override
public Iterator<T> iterator() {
return source;
}
}
}
|
google/j2objc | 35,175 | jre_emul/android/platform/external/icu/android_icu4j/src/main/java/android/icu/util/UResourceBundle.java | /* GENERATED SOURCE. DO NOT MODIFY. */
// © 2016 and later: Unicode, Inc. and others.
// License & terms of use: http://www.unicode.org/copyright.html#License
/*
*******************************************************************************
* Copyright (C) 2004-2016, International Business Machines Corporation and
* others. All Rights Reserved.
*******************************************************************************
*/
package android.icu.util;
import java.nio.ByteBuffer;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.MissingResourceException;
import java.util.ResourceBundle;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import android.icu.impl.ICUData;
import android.icu.impl.ICUResourceBundle;
import android.icu.impl.ICUResourceBundleReader;
import android.icu.impl.ResourceBundleWrapper;
/**
* <strong>[icu enhancement]</strong> ICU's replacement for {@link java.util.ResourceBundle}. Methods, fields, and other functionality specific to ICU are labeled '<strong>[icu]</strong>'.
*
* <p>A class representing a collection of resource information pertaining to a given
* locale. A resource bundle provides a way of accessing locale- specific information in a
* data file. You create a resource bundle that manages the resources for a given locale
* and then ask it for individual resources.
*
* <p>In ResourceBundle, an object is created and the sub-items are fetched using the
* getString and getObject methods. In UResourceBundle, each individual element of a
* resource is a resource by itself.
*
* <p>Resource bundles in ICU are currently defined using text files that conform to the
* following <a
* href="http://source.icu-project.org/repos/icu/icuhtml/trunk/design/bnf_rb.txt">BNF
* definition</a>. More on resource bundle concepts and syntax can be found in the <a
* href="http://www.icu-project.org/userguide/ResourceManagement.html">Users Guide</a>.
*
* <p>The packaging of ICU *.res files can be of two types
* ICU4C:
* <pre>
* root.res
* |
* --------
* | |
* fr.res en.res
* |
* --------
* | |
* fr_CA.res fr_FR.res
* </pre>
* JAVA/JDK:
* <pre>
* LocaleElements.res
* |
* -------------------
* | |
* LocaleElements_fr.res LocaleElements_en.res
* |
* ---------------------------
* | |
* LocaleElements_fr_CA.res LocaleElements_fr_FR.res
* </pre>
*
* Depending on the organization of your resources, the syntax to getBundleInstance will
* change. To open ICU style organization use:
*
* <pre>
* UResourceBundle bundle =
* UResourceBundle.getBundleInstance("com/mycompany/resources",
* "en_US", myClassLoader);
* </pre>
* To open Java/JDK style organization use:
* <pre>
* UResourceBundle bundle =
* UResourceBundle.getBundleInstance("com.mycompany.resources.LocaleElements",
* "en_US", myClassLoader);
* </pre>
*
 * <p>Note: Please pass a class loader when loading non-ICU resources. Java security does not
 * allow loading of resources across jar files; you must provide your class loader
 * to load the resources.
* @author ram
* @hide Only a subset of ICU is exposed in Android
*/
public abstract class UResourceBundle extends ResourceBundle {
/**
 * <strong>[icu]</strong> Creates a resource bundle using the specified base name and locale.
 * ICU_DATA_CLASS is used as the default root.
 * @param baseName string containing the name of the data package.
 * If null the default ICU package name is used.
 * @param localeName the locale for which a resource bundle is desired
 * @throws MissingResourceException If no resource bundle for the specified base name
 * can be found
 * @return a resource bundle for the given base name and locale
 */
public static UResourceBundle getBundleInstance(String baseName, String localeName) {
    // Delegate to the full factory using the ICU data class loader,
    // with locale fallback enabled (disableFallback == false).
    return getBundleInstance(
            baseName, localeName, ICUResourceBundle.ICU_DATA_CLASS_LOADER, false);
}
/**
 * <strong>[icu]</strong> Creates a resource bundle using the specified base name, locale, and class root.
 *
 * @param baseName string containing the name of the data package.
 * If null the default ICU package name is used.
 * @param localeName the locale for which a resource bundle is desired
 * @param root the class object from which to load the resource bundle
 * @throws MissingResourceException If no resource bundle for the specified base name
 * can be found
 * @return a resource bundle for the given base name and locale
 */
public static UResourceBundle getBundleInstance(String baseName, String localeName,
        ClassLoader root) {
    // Locale fallback is enabled by default (disableFallback == false).
    return getBundleInstance(baseName, localeName, root, false);
}
/**
* <strong>[icu]</strong> Creates a resource bundle using the specified base name, locale, and class
* root.
*
* @param baseName string containing the name of the data package.
* If null the default ICU package name is used.
* @param localeName the locale for which a resource bundle is desired
* @param root the class object from which to load the resource bundle
* @param disableFallback Option to disable locale inheritence.
* If true the fallback chain will not be built.
* @throws MissingResourceException
* if no resource bundle for the specified base name can be found
* @return a resource bundle for the given base name and locale
*
*/
protected static UResourceBundle getBundleInstance(String baseName, String localeName,
ClassLoader root, boolean disableFallback) {
return instantiateBundle(baseName, localeName, root, disableFallback);
}
/**
* <strong>[icu]</strong> Sole constructor. (For invocation by subclass constructors, typically
* implicit.) This is public for compatibility with Java, whose compiler
* will generate public default constructors for an abstract class.
*/
public UResourceBundle() {
}
/**
* <strong>[icu]</strong> Creates a UResourceBundle for the locale specified, from which users can extract
* resources by using their corresponding keys.
* @param locale specifies the locale for which we want to open the resource.
* If null the bundle for default locale is opened.
* @return a resource bundle for the given locale
*/
public static UResourceBundle getBundleInstance(ULocale locale) {
if (locale==null) {
locale = ULocale.getDefault();
}
return getBundleInstance(ICUData.ICU_BASE_NAME, locale.getBaseName(),
ICUResourceBundle.ICU_DATA_CLASS_LOADER, false);
}
/**
* <strong>[icu]</strong> Creates a UResourceBundle for the default locale and specified base name,
* from which users can extract resources by using their corresponding keys.
* @param baseName string containing the name of the data package.
* If null the default ICU package name is used.
* @return a resource bundle for the given base name and default locale
*/
public static UResourceBundle getBundleInstance(String baseName) {
if (baseName == null) {
baseName = ICUData.ICU_BASE_NAME;
}
ULocale uloc = ULocale.getDefault();
return getBundleInstance(baseName, uloc.getBaseName(), ICUResourceBundle.ICU_DATA_CLASS_LOADER,
false);
}
/**
* <strong>[icu]</strong> Creates a UResourceBundle for the specified locale and specified base name,
* from which users can extract resources by using their corresponding keys.
* @param baseName string containing the name of the data package.
* If null the default ICU package name is used.
* @param locale specifies the locale for which we want to open the resource.
* If null the bundle for default locale is opened.
* @return a resource bundle for the given base name and locale
*/
public static UResourceBundle getBundleInstance(String baseName, Locale locale) {
if (baseName == null) {
baseName = ICUData.ICU_BASE_NAME;
}
ULocale uloc = locale == null ? ULocale.getDefault() : ULocale.forLocale(locale);
return getBundleInstance(baseName, uloc.getBaseName(),
ICUResourceBundle.ICU_DATA_CLASS_LOADER, false);
}
/**
* <strong>[icu]</strong> Creates a UResourceBundle, from which users can extract resources by using
* their corresponding keys.
* @param baseName string containing the name of the data package.
* If null the default ICU package name is used.
* @param locale specifies the locale for which we want to open the resource.
* If null the bundle for default locale is opened.
* @return a resource bundle for the given base name and locale
*/
public static UResourceBundle getBundleInstance(String baseName, ULocale locale) {
if (baseName == null) {
baseName = ICUData.ICU_BASE_NAME;
}
if (locale == null) {
locale = ULocale.getDefault();
}
return getBundleInstance(baseName, locale.getBaseName(),
ICUResourceBundle.ICU_DATA_CLASS_LOADER, false);
}
/**
* <strong>[icu]</strong> Creates a UResourceBundle for the specified locale and specified base name,
* from which users can extract resources by using their corresponding keys.
* @param baseName string containing the name of the data package.
* If null the default ICU package name is used.
* @param locale specifies the locale for which we want to open the resource.
* If null the bundle for default locale is opened.
* @param loader the loader to use
* @return a resource bundle for the given base name and locale
*/
public static UResourceBundle getBundleInstance(String baseName, Locale locale,
ClassLoader loader) {
if (baseName == null) {
baseName = ICUData.ICU_BASE_NAME;
}
ULocale uloc = locale == null ? ULocale.getDefault() : ULocale.forLocale(locale);
return getBundleInstance(baseName, uloc.getBaseName(), loader, false);
}
/**
* <strong>[icu]</strong> Creates a UResourceBundle, from which users can extract resources by using
* their corresponding keys.<br><br>
* Note: Please use this API for loading non-ICU resources. Java security does not
* allow loading of resources across jar files. You must provide your class loader
* to load the resources
* @param baseName string containing the name of the data package.
* If null the default ICU package name is used.
* @param locale specifies the locale for which we want to open the resource.
* If null the bundle for default locale is opened.
* @param loader the loader to use
* @return a resource bundle for the given base name and locale
*/
public static UResourceBundle getBundleInstance(String baseName, ULocale locale,
ClassLoader loader) {
if (baseName == null) {
baseName = ICUData.ICU_BASE_NAME;
}
if (locale == null) {
locale = ULocale.getDefault();
}
return getBundleInstance(baseName, locale.getBaseName(), loader, false);
}
/**
* <strong>[icu]</strong> Returns the RFC 3066 conformant locale id of this resource bundle.
* This method can be used after a call to getBundleInstance() to
* determine whether the resource bundle returned really
* corresponds to the requested locale or is a fallback.
*
* @return the locale of this resource bundle
*/
public abstract ULocale getULocale();
/**
* <strong>[icu]</strong> Returns the localeID
* @return The string representation of the localeID
*/
protected abstract String getLocaleID();
/**
* <strong>[icu]</strong> Returns the base name of the resource bundle
* @return The string representation of the base name
*/
protected abstract String getBaseName();
/**
* <strong>[icu]</strong> Returns the parent bundle
* @return The parent bundle
*/
protected abstract UResourceBundle getParent();
/**
* Returns the locale of this bundle
* @return the locale of this resource bundle
*/
@Override
public Locale getLocale(){
return getULocale().toLocale();
}
private enum RootType { MISSING, ICU, JAVA }
private static Map<String, RootType> ROOT_CACHE = new ConcurrentHashMap<String, RootType>();
private static RootType getRootType(String baseName, ClassLoader root) {
RootType rootType = ROOT_CACHE.get(baseName);
if (rootType == null) {
String rootLocale = (baseName.indexOf('.')==-1) ? "root" : "";
try{
ICUResourceBundle.getBundleInstance(baseName, rootLocale, root, true);
rootType = RootType.ICU;
}catch(MissingResourceException ex){
try{
ResourceBundleWrapper.getBundleInstance(baseName, rootLocale, root, true);
rootType = RootType.JAVA;
}catch(MissingResourceException e){
//throw away the exception
rootType = RootType.MISSING;
}
}
ROOT_CACHE.put(baseName, rootType);
}
return rootType;
}
private static void setRootType(String baseName, RootType rootType) {
ROOT_CACHE.put(baseName, rootType);
}
/**
* <strong>[icu]</strong> Loads a new resource bundle for the given base name, locale and class loader.
* Optionally will disable loading of fallback bundles.
* @param baseName string containing the name of the data package.
* If null the default ICU package name is used.
* @param localeName the locale for which a resource bundle is desired
* @param root the class object from which to load the resource bundle
* @param disableFallback disables loading of fallback lookup chain
* @throws MissingResourceException If no resource bundle for the specified base name
* can be found
* @return a resource bundle for the given base name and locale
*/
protected static UResourceBundle instantiateBundle(String baseName, String localeName,
ClassLoader root, boolean disableFallback) {
RootType rootType = getRootType(baseName, root);
switch (rootType) {
case ICU:
return ICUResourceBundle.getBundleInstance(baseName, localeName, root, disableFallback);
case JAVA:
return ResourceBundleWrapper.getBundleInstance(baseName, localeName, root,
disableFallback);
case MISSING:
default:
UResourceBundle b;
try{
b = ICUResourceBundle.getBundleInstance(baseName, localeName, root,
disableFallback);
setRootType(baseName, RootType.ICU);
}catch(MissingResourceException ex){
b = ResourceBundleWrapper.getBundleInstance(baseName, localeName, root,
disableFallback);
setRootType(baseName, RootType.JAVA);
}
return b;
}
}
/**
* <strong>[icu]</strong> Returns a binary data item from a binary resource, as a read-only ByteBuffer.
*
* @return a pointer to a chunk of unsigned bytes which live in a memory mapped/DLL
* file.
* @see #getIntVector
* @see #getInt
* @throws MissingResourceException If no resource bundle can be found.
* @throws UResourceTypeMismatchException If the resource has a type mismatch.
*/
public ByteBuffer getBinary() {
throw new UResourceTypeMismatchException("");
}
/**
* Returns a string from a string resource type
*
* @return a string
* @see #getBinary()
* @see #getIntVector
* @see #getInt
* @throws MissingResourceException If resource bundle is missing.
* @throws UResourceTypeMismatchException If resource bundle has a type mismatch.
*/
public String getString() {
throw new UResourceTypeMismatchException("");
}
/**
* Returns a string array from a array resource type
*
* @return a string
* @see #getString()
* @see #getIntVector
* @throws MissingResourceException If resource bundle is missing.
* @throws UResourceTypeMismatchException If resource bundle has a type mismatch.
*/
public String[] getStringArray() {
throw new UResourceTypeMismatchException("");
}
/**
* <strong>[icu]</strong> Returns a binary data from a binary resource, as a byte array with a copy
* of the bytes from the resource bundle.
*
* @param ba The byte array to write the bytes to. A null variable is OK.
* @return an array of bytes containing the binary data from the resource.
* @see #getIntVector
* @see #getInt
* @throws MissingResourceException If resource bundle is missing.
* @throws UResourceTypeMismatchException If resource bundle has a type mismatch.
*/
public byte[] getBinary(byte[] ba) {
throw new UResourceTypeMismatchException("");
}
/**
* <strong>[icu]</strong> Returns a 32 bit integer array from a resource.
*
* @return a pointer to a chunk of unsigned bytes which live in a memory mapped/DLL file.
* @see #getBinary()
* @see #getInt
* @throws MissingResourceException If resource bundle is missing.
* @throws UResourceTypeMismatchException If resource bundle has a type mismatch.
*/
public int[] getIntVector() {
throw new UResourceTypeMismatchException("");
}
/**
* <strong>[icu]</strong> Returns a signed integer from a resource.
*
* @return an integer value
* @see #getIntVector
* @see #getBinary()
* @throws MissingResourceException If resource bundle is missing.
* @throws UResourceTypeMismatchException If resource bundle type mismatch.
*/
public int getInt() {
throw new UResourceTypeMismatchException("");
}
/**
* <strong>[icu]</strong> Returns a unsigned integer from a resource.
* This integer is originally 28 bit and the sign gets propagated.
*
* @return an integer value
* @see #getIntVector
* @see #getBinary()
* @throws MissingResourceException If resource bundle is missing.
* @throws UResourceTypeMismatchException If resource bundle type mismatch.
*/
public int getUInt() {
throw new UResourceTypeMismatchException("");
}
/**
* <strong>[icu]</strong> Returns a resource in a given resource that has a given key.
*
* @param aKey a key associated with the wanted resource
* @return a resource bundle object representing the resource
* @throws MissingResourceException If resource bundle is missing.
*/
public UResourceBundle get(String aKey) {
UResourceBundle obj = findTopLevel(aKey);
if (obj == null) {
String fullName = ICUResourceBundleReader.getFullName(getBaseName(), getLocaleID());
throw new MissingResourceException(
"Can't find resource for bundle " + fullName + ", key "
+ aKey, this.getClass().getName(), aKey);
}
return obj;
}
/**
* Returns a resource in a given resource that has a given key, or null if the
* resource is not found.
*
* @param aKey the key associated with the wanted resource
* @return the resource, or null
* @see #get(String)
* @deprecated This API is ICU internal only.
* @hide draft / provisional / internal are hidden on Android
*/
@Deprecated
protected UResourceBundle findTopLevel(String aKey) {
// NOTE: this only works for top-level resources. For resources at lower
// levels, it fails when you fall back to the parent, since you're now
// looking at root resources, not at the corresponding nested resource.
for (UResourceBundle res = this; res != null; res = res.getParent()) {
UResourceBundle obj = res.handleGet(aKey, null, this);
if (obj != null) {
return obj;
}
}
return null;
}
/**
* Returns the string in a given resource at the specified index.
*
* @param index an index to the wanted string.
* @return a string which lives in the resource.
* @throws IndexOutOfBoundsException If the index value is out of bounds of accepted values.
* @throws UResourceTypeMismatchException If resource bundle type mismatch.
*/
public String getString(int index) {
ICUResourceBundle temp = (ICUResourceBundle)get(index);
if (temp.getType() == STRING) {
return temp.getString();
}
throw new UResourceTypeMismatchException("");
}
/**
* <strong>[icu]</strong> Returns the resource in a given resource at the specified index.
*
* @param index an index to the wanted resource.
* @return the sub resource UResourceBundle object
* @throws IndexOutOfBoundsException If the index value is out of bounds of accepted values.
* @throws MissingResourceException If the resource bundle is missing.
*/
public UResourceBundle get(int index) {
UResourceBundle obj = handleGet(index, null, this);
if (obj == null) {
obj = getParent();
if (obj != null) {
obj = obj.get(index);
}
if (obj == null)
throw new MissingResourceException(
"Can't find resource for bundle "
+ this.getClass().getName() + ", key "
+ getKey(), this.getClass().getName(), getKey());
}
return obj;
}
/**
* Returns a resource in a given resource that has a given index, or null if the
* resource is not found.
*
* @param index the index of the resource
* @return the resource, or null
* @see #get(int)
* @deprecated This API is ICU internal only.
* @hide draft / provisional / internal are hidden on Android
*/
@Deprecated
protected UResourceBundle findTopLevel(int index) {
// NOTE: this _barely_ works for top-level resources. For resources at lower
// levels, it fails when you fall back to the parent, since you're now
// looking at root resources, not at the corresponding nested resource.
// Not only that, but unless the indices correspond 1-to-1, the index will
// lose meaning. Essentially this only works if the child resource arrays
// are prefixes of their parent arrays.
for (UResourceBundle res = this; res != null; res = res.getParent()) {
UResourceBundle obj = res.handleGet(index, null, this);
if (obj != null) {
return obj;
}
}
return null;
}
/**
* Returns the keys in this bundle as an enumeration
* @return an enumeration containing key strings,
* which is empty if this is not a bundle or a table resource
*/
@Override
public Enumeration<String> getKeys() {
return Collections.enumeration(keySet());
}
/**
* Returns a Set of all keys contained in this ResourceBundle and its parent bundles.
* @return a Set of all keys contained in this ResourceBundle and its parent bundles,
* which is empty if this is not a bundle or a table resource
* @deprecated This API is ICU internal only.
* @hide draft / provisional / internal are hidden on Android
*/
@Override
@Deprecated
public Set<String> keySet() {
// TODO: Java 6 ResourceBundle has keySet() which calls handleKeySet()
// and caches the results.
// When we upgrade to Java 6, we still need to check for isTopLevelResource().
// Keep the else branch as is. The if body should just return super.keySet().
// Remove then-redundant caching of the keys.
Set<String> keys = null;
ICUResourceBundle icurb = null;
if(isTopLevelResource() && this instanceof ICUResourceBundle) {
// We do not cache the top-level keys in this base class so that
// not every string/int/binary... resource has to have a keys cache field.
icurb = (ICUResourceBundle)this;
keys = icurb.getTopLevelKeySet();
}
if(keys == null) {
if(isTopLevelResource()) {
TreeSet<String> newKeySet;
if(parent == null) {
newKeySet = new TreeSet<String>();
} else if(parent instanceof UResourceBundle) {
newKeySet = new TreeSet<String>(((UResourceBundle)parent).keySet());
} else {
// TODO: Java 6 ResourceBundle has keySet(); use it when we upgrade to Java 6
// and remove this else branch.
newKeySet = new TreeSet<String>();
Enumeration<String> parentKeys = parent.getKeys();
while(parentKeys.hasMoreElements()) {
newKeySet.add(parentKeys.nextElement());
}
}
newKeySet.addAll(handleKeySet());
keys = Collections.unmodifiableSet(newKeySet);
if(icurb != null) {
icurb.setTopLevelKeySet(keys);
}
} else {
return handleKeySet();
}
}
return keys;
}
/**
* Returns a Set of the keys contained <i>only</i> in this ResourceBundle.
* This does not include further keys from parent bundles.
* @return a Set of the keys contained only in this ResourceBundle,
* which is empty if this is not a bundle or a table resource
* @deprecated This API is ICU internal only.
* @hide draft / provisional / internal are hidden on Android
*/
@Override
@Deprecated
protected Set<String> handleKeySet() {
return Collections.emptySet();
}
/**
* <strong>[icu]</strong> Returns the size of a resource. Size for scalar types is always 1, and for
* vector/table types is the number of child resources.
*
* <br><b>Note:</b> Integer array is treated as a scalar type. There are no APIs to
* access individual members of an integer array. It is always returned as a whole.
* @return number of resources in a given resource.
*/
public int getSize() {
return 1;
}
/**
* <strong>[icu]</strong> Returns the type of a resource.
* Available types are {@link #INT INT}, {@link #ARRAY ARRAY},
* {@link #BINARY BINARY}, {@link #INT_VECTOR INT_VECTOR},
* {@link #STRING STRING}, {@link #TABLE TABLE}.
*
* @return type of the given resource.
*/
public int getType() {
return NONE;
}
/**
* <strong>[icu]</strong> Return the version number associated with this UResourceBundle as an
* VersionInfo object.
* @return VersionInfo object containing the version of the bundle
*/
public VersionInfo getVersion() {
return null;
}
/**
* <strong>[icu]</strong> Returns the iterator which iterates over this
* resource bundle
* @return UResourceBundleIterator that iterates over the resources in the bundle
*/
public UResourceBundleIterator getIterator() {
return new UResourceBundleIterator(this);
}
/**
* <strong>[icu]</strong> Returns the key associated with a given resource. Not all the resources have
* a key - only those that are members of a table.
* @return a key associated to this resource, or null if it doesn't have a key
*/
public String getKey() {
return null;
}
/**
* <strong>[icu]</strong> Resource type constant for "no resource".
*/
public static final int NONE = -1;
/**
* <strong>[icu]</strong> Resource type constant for strings.
*/
public static final int STRING = 0;
/**
* <strong>[icu]</strong> Resource type constant for binary data.
*/
public static final int BINARY = 1;
/**
* <strong>[icu]</strong> Resource type constant for tables of key-value pairs.
*/
public static final int TABLE = 2;
/**
* <strong>[icu]</strong> Resource type constant for a single 28-bit integer, interpreted as
* signed or unsigned by the getInt() function.
* @see #getInt
*/
public static final int INT = 7;
/**
* <strong>[icu]</strong> Resource type constant for arrays of resources.
*/
public static final int ARRAY = 8;
/**
* Resource type constant for vectors of 32-bit integers.
* @see #getIntVector
*/
public static final int INT_VECTOR = 14;
//====== protected members ==============
/**
* <strong>[icu]</strong> Actual worker method for fetching a resource based on the given key.
* Sub classes must override this method if they support resources with keys.
* @param aKey the key string of the resource to be fetched
* @param aliasesVisited hashtable object to hold references of resources already seen
* @param requested the original resource bundle object on which the get method was invoked.
* The requested bundle and the bundle on which this method is invoked
* are the same, except in the cases where aliases are involved.
* @return UResourceBundle a resource associated with the key
*/
protected UResourceBundle handleGet(String aKey, HashMap<String, String> aliasesVisited,
UResourceBundle requested) {
return null;
}
/**
* <strong>[icu]</strong> Actual worker method for fetching a resource based on the given index.
* Sub classes must override this method if they support arrays of resources.
* @param index the index of the resource to be fetched
* @param aliasesVisited hashtable object to hold references of resources already seen
* @param requested the original resource bundle object on which the get method was invoked.
* The requested bundle and the bundle on which this method is invoked
* are the same, except in the cases where aliases are involved.
* @return UResourceBundle a resource associated with the index
*/
protected UResourceBundle handleGet(int index, HashMap<String, String> aliasesVisited,
UResourceBundle requested) {
return null;
}
/**
* <strong>[icu]</strong> Actual worker method for fetching the array of strings in a resource.
* Sub classes must override this method if they support arrays of strings.
* @return String[] An array of strings containing strings
*/
protected String[] handleGetStringArray() {
return null;
}
/**
* <strong>[icu]</strong> Actual worker method for fetching the keys of resources contained in the resource.
* Sub classes must override this method if they support keys and associated resources.
*
* @return Enumeration An enumeration of all the keys in this resource.
*/
protected Enumeration<String> handleGetKeys(){
return null;
}
/**
* {@inheritDoc}
*/
// this method is declared in ResourceBundle class
// so cannot change the signature
// Override this method
@Override
protected Object handleGetObject(String aKey) {
return handleGetObjectImpl(aKey, this);
}
/**
* Override the superclass method
*/
// To facilitate XPath style aliases we need a way to pass the reference
// to requested locale. The only way I could figure out is to implement
// the look up logic here. This has a disadvantage that if the client
// loads an ICUResourceBundle, calls ResourceBundle.getObject method
// with a key that does not exist in the bundle then the lookup is
// done twice before throwing a MissingResourceExpection.
private Object handleGetObjectImpl(String aKey, UResourceBundle requested) {
Object obj = resolveObject(aKey, requested);
if (obj == null) {
UResourceBundle parentBundle = getParent();
if (parentBundle != null) {
obj = parentBundle.handleGetObjectImpl(aKey, requested);
}
if (obj == null)
throw new MissingResourceException(
"Can't find resource for bundle "
+ this.getClass().getName() + ", key " + aKey,
this.getClass().getName(), aKey);
}
return obj;
}
// Routine for figuring out the type of object to be returned
// string or string array
private Object resolveObject(String aKey, UResourceBundle requested) {
if (getType() == STRING) {
return getString();
}
UResourceBundle obj = handleGet(aKey, null, requested);
if (obj != null) {
if (obj.getType() == STRING) {
return obj.getString();
}
try {
if (obj.getType() == ARRAY) {
return obj.handleGetStringArray();
}
} catch (UResourceTypeMismatchException ex) {
return obj;
}
}
return obj;
}
/**
* Is this a top-level resource, that is, a whole bundle?
* @return true if this is a top-level resource
* @deprecated This API is ICU internal only.
* @hide draft / provisional / internal are hidden on Android
*/
@Deprecated
protected boolean isTopLevelResource() {
return true;
}
}
|
googleapis/google-cloud-java | 34,947 | java-alloydb/proto-google-cloud-alloydb-v1beta/src/main/java/com/google/cloud/alloydb/v1beta/EncryptionInfo.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/alloydb/v1beta/resources.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.alloydb.v1beta;
/**
*
*
* <pre>
* EncryptionInfo describes the encryption information of a cluster or a backup.
* </pre>
*
* Protobuf type {@code google.cloud.alloydb.v1beta.EncryptionInfo}
*/
public final class EncryptionInfo extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.alloydb.v1beta.EncryptionInfo)
EncryptionInfoOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use EncryptionInfo.newBuilder() to construct.
  private EncryptionInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor used by the runtime; initializes fields to proto3 defaults
  // (enum value 0 and an empty repeated-string list).
  private EncryptionInfo() {
    encryptionType_ = 0;
    kmsKeyVersions_ = com.google.protobuf.LazyStringArrayList.emptyList();
  }
  // Called by the protobuf runtime to create fresh instances via reflection-free
  // instantiation; the parameter is a marker type and is intentionally unused.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new EncryptionInfo();
  }
  // Descriptor for this message type, as declared in
  // google/cloud/alloydb/v1beta/resources.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.alloydb.v1beta.ResourcesProto
        .internal_static_google_cloud_alloydb_v1beta_EncryptionInfo_descriptor;
  }
  // Maps the generated field accessors onto the descriptor for reflective access.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.alloydb.v1beta.ResourcesProto
        .internal_static_google_cloud_alloydb_v1beta_EncryptionInfo_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.alloydb.v1beta.EncryptionInfo.class,
            com.google.cloud.alloydb.v1beta.EncryptionInfo.Builder.class);
  }
  /**
   *
   *
   * <pre>
   * Possible encryption types.
   * </pre>
   *
   * Protobuf enum {@code google.cloud.alloydb.v1beta.EncryptionInfo.Type}
   */
  public enum Type implements com.google.protobuf.ProtocolMessageEnum {
    /**
     *
     *
     * <pre>
     * Encryption type not specified. Defaults to GOOGLE_DEFAULT_ENCRYPTION.
     * </pre>
     *
     * <code>TYPE_UNSPECIFIED = 0;</code>
     */
    TYPE_UNSPECIFIED(0),
    /**
     *
     *
     * <pre>
     * The data is encrypted at rest with a key that is fully managed by Google.
     * No key version will be populated. This is the default state.
     * </pre>
     *
     * <code>GOOGLE_DEFAULT_ENCRYPTION = 1;</code>
     */
    GOOGLE_DEFAULT_ENCRYPTION(1),
    /**
     *
     *
     * <pre>
     * The data is encrypted at rest with a key that is managed by the customer.
     * KMS key versions will be populated.
     * </pre>
     *
     * <code>CUSTOMER_MANAGED_ENCRYPTION = 2;</code>
     */
    CUSTOMER_MANAGED_ENCRYPTION(2),
    // Sentinel for wire values not known to this generated code version.
    UNRECOGNIZED(-1),
    ;
    /**
     *
     *
     * <pre>
     * Encryption type not specified. Defaults to GOOGLE_DEFAULT_ENCRYPTION.
     * </pre>
     *
     * <code>TYPE_UNSPECIFIED = 0;</code>
     */
    public static final int TYPE_UNSPECIFIED_VALUE = 0;
    /**
     *
     *
     * <pre>
     * The data is encrypted at rest with a key that is fully managed by Google.
     * No key version will be populated. This is the default state.
     * </pre>
     *
     * <code>GOOGLE_DEFAULT_ENCRYPTION = 1;</code>
     */
    public static final int GOOGLE_DEFAULT_ENCRYPTION_VALUE = 1;
    /**
     *
     *
     * <pre>
     * The data is encrypted at rest with a key that is managed by the customer.
     * KMS key versions will be populated.
     * </pre>
     *
     * <code>CUSTOMER_MANAGED_ENCRYPTION = 2;</code>
     */
    public static final int CUSTOMER_MANAGED_ENCRYPTION_VALUE = 2;
    // Wire value of this enum entry; UNRECOGNIZED has no defined number.
    public final int getNumber() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }
    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static Type valueOf(int value) {
      return forNumber(value);
    }
    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value,
     *     or null if the value is unknown (callers map null to UNRECOGNIZED).
     */
    public static Type forNumber(int value) {
      switch (value) {
        case 0:
          return TYPE_UNSPECIFIED;
        case 1:
          return GOOGLE_DEFAULT_ENCRYPTION;
        case 2:
          return CUSTOMER_MANAGED_ENCRYPTION;
        default:
          return null;
      }
    }
    public static com.google.protobuf.Internal.EnumLiteMap<Type> internalGetValueMap() {
      return internalValueMap;
    }
    private static final com.google.protobuf.Internal.EnumLiteMap<Type> internalValueMap =
        new com.google.protobuf.Internal.EnumLiteMap<Type>() {
          public Type findValueByNumber(int number) {
            return Type.forNumber(number);
          }
        };
    public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalStateException(
            "Can't get the descriptor of an unrecognized enum value.");
      }
      return getDescriptor().getValues().get(ordinal());
    }
    public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
      return getDescriptor();
    }
    public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
      return com.google.cloud.alloydb.v1beta.EncryptionInfo.getDescriptor().getEnumTypes().get(0);
    }
    private static final Type[] VALUES = values();
    // Resolves a descriptor back to the enum constant; index -1 denotes UNRECOGNIZED.
    public static Type valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }
    private final int value;
    private Type(int value) {
      this.value = value;
    }
    // @@protoc_insertion_point(enum_scope:google.cloud.alloydb.v1beta.EncryptionInfo.Type)
  }
  public static final int ENCRYPTION_TYPE_FIELD_NUMBER = 1;
  // Raw wire value of encryption_type; 0 == TYPE_UNSPECIFIED (proto3 default).
  private int encryptionType_ = 0;
  /**
   *
   *
   * <pre>
   * Output only. Type of encryption.
   * </pre>
   *
   * <code>
   * .google.cloud.alloydb.v1beta.EncryptionInfo.Type encryption_type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   *
   * @return The enum numeric value on the wire for encryptionType.
   */
  @java.lang.Override
  public int getEncryptionTypeValue() {
    return encryptionType_;
  }
  /**
   *
   *
   * <pre>
   * Output only. Type of encryption.
   * </pre>
   *
   * <code>
   * .google.cloud.alloydb.v1beta.EncryptionInfo.Type encryption_type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   *
   * @return The encryptionType, or {@code UNRECOGNIZED} if the wire value is unknown.
   */
  @java.lang.Override
  public com.google.cloud.alloydb.v1beta.EncryptionInfo.Type getEncryptionType() {
    com.google.cloud.alloydb.v1beta.EncryptionInfo.Type result =
        com.google.cloud.alloydb.v1beta.EncryptionInfo.Type.forNumber(encryptionType_);
    return result == null
        ? com.google.cloud.alloydb.v1beta.EncryptionInfo.Type.UNRECOGNIZED
        : result;
  }
  public static final int KMS_KEY_VERSIONS_FIELD_NUMBER = 2;
  // Lazily-parsed repeated string field; starts as the shared empty list.
  @SuppressWarnings("serial")
  private com.google.protobuf.LazyStringArrayList kmsKeyVersions_ =
      com.google.protobuf.LazyStringArrayList.emptyList();
  /**
   *
   *
   * <pre>
   * Output only. Cloud KMS key versions that are being used to protect the
   * database or the backup.
   * </pre>
   *
   * <code>
   * repeated string kms_key_versions = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return A list containing the kmsKeyVersions.
   */
  public com.google.protobuf.ProtocolStringList getKmsKeyVersionsList() {
    return kmsKeyVersions_;
  }
  /**
   *
   *
   * <pre>
   * Output only. Cloud KMS key versions that are being used to protect the
   * database or the backup.
   * </pre>
   *
   * <code>
   * repeated string kms_key_versions = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The count of kmsKeyVersions.
   */
  public int getKmsKeyVersionsCount() {
    return kmsKeyVersions_.size();
  }
  /**
   *
   *
   * <pre>
   * Output only. Cloud KMS key versions that are being used to protect the
   * database or the backup.
   * </pre>
   *
   * <code>
   * repeated string kms_key_versions = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @param index The index of the element to return.
   * @return The kmsKeyVersions at the given index.
   */
  public java.lang.String getKmsKeyVersions(int index) {
    return kmsKeyVersions_.get(index);
  }
  /**
   *
   *
   * <pre>
   * Output only. Cloud KMS key versions that are being used to protect the
   * database or the backup.
   * </pre>
   *
   * <code>
   * repeated string kms_key_versions = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @param index The index of the value to return.
   * @return The bytes of the kmsKeyVersions at the given index.
   */
  public com.google.protobuf.ByteString getKmsKeyVersionsBytes(int index) {
    return kmsKeyVersions_.getByteString(index);
  }
  // Memoized isInitialized() result: -1 = not computed, 1 = initialized, 0 = not.
  private byte memoizedIsInitialized = -1;
// Memoized initialization check: memoizedIsInitialized is -1 = unknown,
// 0 = false, 1 = true. This message has no required fields, so once computed
// the answer is always true.
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Serializes this message: the enum field 1 is written only when it differs
// from the default (TYPE_UNSPECIFIED), followed by each element of repeated
// string field 2, then any unknown fields preserved from parsing.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (encryptionType_
!= com.google.cloud.alloydb.v1beta.EncryptionInfo.Type.TYPE_UNSPECIFIED.getNumber()) {
output.writeEnum(1, encryptionType_);
}
for (int i = 0; i < kmsKeyVersions_.size(); i++) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, kmsKeyVersions_.getRaw(i));
}
getUnknownFields().writeTo(output);
}
// Computes (and memoizes in memoizedSize) the serialized byte size, mirroring
// the field-skipping logic in writeTo.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (encryptionType_
!= com.google.cloud.alloydb.v1beta.EncryptionInfo.Type.TYPE_UNSPECIFIED.getNumber()) {
size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, encryptionType_);
}
{
int dataSize = 0;
for (int i = 0; i < kmsKeyVersions_.size(); i++) {
dataSize += computeStringSizeNoTag(kmsKeyVersions_.getRaw(i));
}
size += dataSize;
// One byte of tag overhead per element of field 2.
size += 1 * getKmsKeyVersionsList().size();
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
// Value equality over all fields, including unknown fields; arguments of other
// message types are deferred to the superclass implementation.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.alloydb.v1beta.EncryptionInfo)) {
return super.equals(obj);
}
com.google.cloud.alloydb.v1beta.EncryptionInfo other =
(com.google.cloud.alloydb.v1beta.EncryptionInfo) obj;
if (encryptionType_ != other.encryptionType_) return false;
if (!getKmsKeyVersionsList().equals(other.getKmsKeyVersionsList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
// Memoized hash consistent with equals(); field numbers are folded in so equal
// values stored under different field numbers still hash differently.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + ENCRYPTION_TYPE_FIELD_NUMBER;
hash = (53 * hash) + encryptionType_;
if (getKmsKeyVersionsCount() > 0) {
hash = (37 * hash) + KMS_KEY_VERSIONS_FIELD_NUMBER;
hash = (53 * hash) + getKmsKeyVersionsList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parse entry points. Each overload delegates to PARSER
// (optionally with an extension registry); the InputStream and
// CodedInputStream variants go through the GeneratedMessageV3 helpers so
// plain IOExceptions propagate unwrapped. The parseDelimitedFrom variants
// expect a varint length prefix before the message bytes.
public static com.google.cloud.alloydb.v1beta.EncryptionInfo parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.alloydb.v1beta.EncryptionInfo parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.alloydb.v1beta.EncryptionInfo parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.alloydb.v1beta.EncryptionInfo parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.alloydb.v1beta.EncryptionInfo parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.alloydb.v1beta.EncryptionInfo parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.alloydb.v1beta.EncryptionInfo parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.alloydb.v1beta.EncryptionInfo parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.alloydb.v1beta.EncryptionInfo parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.alloydb.v1beta.EncryptionInfo parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.alloydb.v1beta.EncryptionInfo parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.alloydb.v1beta.EncryptionInfo parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Builder factories: newBuilder() starts from the default instance,
// newBuilder(prototype) pre-populates from an existing message, and
// toBuilder() skips the redundant merge when called on the default instance.
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.alloydb.v1beta.EncryptionInfo prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
// Internal variant used by parent builders of nested messages.
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* EncryptionInfo describes the encryption information of a cluster or a backup.
* </pre>
*
* Protobuf type {@code google.cloud.alloydb.v1beta.EncryptionInfo}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.alloydb.v1beta.EncryptionInfo)
com.google.cloud.alloydb.v1beta.EncryptionInfoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.alloydb.v1beta.ResourcesProto
.internal_static_google_cloud_alloydb_v1beta_EncryptionInfo_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.alloydb.v1beta.ResourcesProto
.internal_static_google_cloud_alloydb_v1beta_EncryptionInfo_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.alloydb.v1beta.EncryptionInfo.class,
com.google.cloud.alloydb.v1beta.EncryptionInfo.Builder.class);
}
// Construct using com.google.cloud.alloydb.v1beta.EncryptionInfo.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
// Resets the builder to field defaults and clears all has-bits.
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
encryptionType_ = 0;
kmsKeyVersions_ = com.google.protobuf.LazyStringArrayList.emptyList();
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.alloydb.v1beta.ResourcesProto
.internal_static_google_cloud_alloydb_v1beta_EncryptionInfo_descriptor;
}
@java.lang.Override
public com.google.cloud.alloydb.v1beta.EncryptionInfo getDefaultInstanceForType() {
return com.google.cloud.alloydb.v1beta.EncryptionInfo.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.alloydb.v1beta.EncryptionInfo build() {
com.google.cloud.alloydb.v1beta.EncryptionInfo result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Builds without required-field enforcement (this message has no required
// fields, so buildPartial and build produce equivalent results).
@java.lang.Override
public com.google.cloud.alloydb.v1beta.EncryptionInfo buildPartial() {
com.google.cloud.alloydb.v1beta.EncryptionInfo result =
new com.google.cloud.alloydb.v1beta.EncryptionInfo(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Copies only the fields whose has-bits are set. The repeated list is frozen
// (makeImmutable) before being shared with the built message so that later
// builder mutations cannot be observed through the message.
private void buildPartial0(com.google.cloud.alloydb.v1beta.EncryptionInfo result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.encryptionType_ = encryptionType_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
kmsKeyVersions_.makeImmutable();
result.kmsKeyVersions_ = kmsKeyVersions_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Dynamic dispatch: use the typed merge for EncryptionInfo instances,
// otherwise fall back to reflective field-by-field merging in the superclass.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.alloydb.v1beta.EncryptionInfo) {
return mergeFrom((com.google.cloud.alloydb.v1beta.EncryptionInfo) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-wise merge following proto3 semantics: set scalar fields overwrite,
// repeated fields are concatenated, unknown fields are merged.
public Builder mergeFrom(com.google.cloud.alloydb.v1beta.EncryptionInfo other) {
if (other == com.google.cloud.alloydb.v1beta.EncryptionInfo.getDefaultInstance()) return this;
if (other.encryptionType_ != 0) {
setEncryptionTypeValue(other.getEncryptionTypeValue());
}
if (!other.kmsKeyVersions_.isEmpty()) {
if (kmsKeyVersions_.isEmpty()) {
// Adopt the other message's (immutable) list directly; it will be copied
// on the next mutation via ensureKmsKeyVersionsIsMutable().
kmsKeyVersions_ = other.kmsKeyVersions_;
bitField0_ |= 0x00000002;
} else {
ensureKmsKeyVersionsIsMutable();
kmsKeyVersions_.addAll(other.kmsKeyVersions_);
}
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Streaming parse-merge: reads tag/value pairs until EOF (tag 0) or an
// end-group tag. The wire tags handled here are 8 (field 1, varint enum) and
// 18 (field 2, length-delimited UTF-8 string); everything else is preserved
// via parseUnknownField.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8:
{
encryptionType_ = input.readEnum();
bitField0_ |= 0x00000001;
break;
} // case 8
case 18:
{
java.lang.String s = input.readStringRequireUtf8();
ensureKmsKeyVersionsIsMutable();
kmsKeyVersions_.add(s);
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
// Notify parent builders even when parsing fails partway through.
onChanged();
} // finally
return this;
}
private int bitField0_;
private int encryptionType_ = 0;
/**
*
*
* <pre>
* Output only. Type of encryption.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1beta.EncryptionInfo.Type encryption_type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The enum numeric value on the wire for encryptionType.
*/
@java.lang.Override
public int getEncryptionTypeValue() {
return encryptionType_;
}
/**
*
*
* <pre>
* Output only. Type of encryption.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1beta.EncryptionInfo.Type encryption_type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @param value The enum numeric value on the wire for encryptionType to set.
* @return This builder for chaining.
*/
public Builder setEncryptionTypeValue(int value) {
encryptionType_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. Type of encryption.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1beta.EncryptionInfo.Type encryption_type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The encryptionType.
*/
@java.lang.Override
public com.google.cloud.alloydb.v1beta.EncryptionInfo.Type getEncryptionType() {
com.google.cloud.alloydb.v1beta.EncryptionInfo.Type result =
com.google.cloud.alloydb.v1beta.EncryptionInfo.Type.forNumber(encryptionType_);
return result == null
? com.google.cloud.alloydb.v1beta.EncryptionInfo.Type.UNRECOGNIZED
: result;
}
/**
*
*
* <pre>
* Output only. Type of encryption.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1beta.EncryptionInfo.Type encryption_type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @param value The encryptionType to set.
* @return This builder for chaining.
*/
public Builder setEncryptionType(com.google.cloud.alloydb.v1beta.EncryptionInfo.Type value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
encryptionType_ = value.getNumber();
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. Type of encryption.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1beta.EncryptionInfo.Type encryption_type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return This builder for chaining.
*/
public Builder clearEncryptionType() {
bitField0_ = (bitField0_ & ~0x00000001);
encryptionType_ = 0;
onChanged();
return this;
}
private com.google.protobuf.LazyStringArrayList kmsKeyVersions_ =
com.google.protobuf.LazyStringArrayList.emptyList();
// Copy-on-write guard: replaces a shared/immutable backing list with a
// mutable copy before any in-place modification, and marks the has-bit.
private void ensureKmsKeyVersionsIsMutable() {
if (!kmsKeyVersions_.isModifiable()) {
kmsKeyVersions_ = new com.google.protobuf.LazyStringArrayList(kmsKeyVersions_);
}
bitField0_ |= 0x00000002;
}
/**
*
*
* <pre>
* Output only. Cloud KMS key versions that are being used to protect the
* database or the backup.
* </pre>
*
* <code>
* repeated string kms_key_versions = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @return A list containing the kmsKeyVersions.
*/
public com.google.protobuf.ProtocolStringList getKmsKeyVersionsList() {
kmsKeyVersions_.makeImmutable();
return kmsKeyVersions_;
}
/**
*
*
* <pre>
* Output only. Cloud KMS key versions that are being used to protect the
* database or the backup.
* </pre>
*
* <code>
* repeated string kms_key_versions = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The count of kmsKeyVersions.
*/
public int getKmsKeyVersionsCount() {
return kmsKeyVersions_.size();
}
/**
*
*
* <pre>
* Output only. Cloud KMS key versions that are being used to protect the
* database or the backup.
* </pre>
*
* <code>
* repeated string kms_key_versions = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @param index The index of the element to return.
* @return The kmsKeyVersions at the given index.
*/
public java.lang.String getKmsKeyVersions(int index) {
return kmsKeyVersions_.get(index);
}
/**
*
*
* <pre>
* Output only. Cloud KMS key versions that are being used to protect the
* database or the backup.
* </pre>
*
* <code>
* repeated string kms_key_versions = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @param index The index of the value to return.
* @return The bytes of the kmsKeyVersions at the given index.
*/
public com.google.protobuf.ByteString getKmsKeyVersionsBytes(int index) {
return kmsKeyVersions_.getByteString(index);
}
/**
*
*
* <pre>
* Output only. Cloud KMS key versions that are being used to protect the
* database or the backup.
* </pre>
*
* <code>
* repeated string kms_key_versions = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @param index The index to set the value at.
* @param value The kmsKeyVersions to set.
* @return This builder for chaining.
*/
public Builder setKmsKeyVersions(int index, java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureKmsKeyVersionsIsMutable();
kmsKeyVersions_.set(index, value);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. Cloud KMS key versions that are being used to protect the
* database or the backup.
* </pre>
*
* <code>
* repeated string kms_key_versions = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The kmsKeyVersions to add.
* @return This builder for chaining.
*/
public Builder addKmsKeyVersions(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureKmsKeyVersionsIsMutable();
kmsKeyVersions_.add(value);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. Cloud KMS key versions that are being used to protect the
* database or the backup.
* </pre>
*
* <code>
* repeated string kms_key_versions = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @param values The kmsKeyVersions to add.
* @return This builder for chaining.
*/
public Builder addAllKmsKeyVersions(java.lang.Iterable<java.lang.String> values) {
ensureKmsKeyVersionsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, kmsKeyVersions_);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. Cloud KMS key versions that are being used to protect the
* database or the backup.
* </pre>
*
* <code>
* repeated string kms_key_versions = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearKmsKeyVersions() {
kmsKeyVersions_ = com.google.protobuf.LazyStringArrayList.emptyList();
// Drop the has-bit. (Removed a stray empty statement ';' left by the generator.)
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. Cloud KMS key versions that are being used to protect the
* database or the backup.
* </pre>
*
* <code>
* repeated string kms_key_versions = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes of the kmsKeyVersions to add.
* @return This builder for chaining.
*/
public Builder addKmsKeyVersionsBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
ensureKmsKeyVersionsIsMutable();
kmsKeyVersions_.add(value);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.alloydb.v1beta.EncryptionInfo)
}
// @@protoc_insertion_point(class_scope:google.cloud.alloydb.v1beta.EncryptionInfo)
// Singleton default instance; unset message-typed fields reference this.
private static final com.google.cloud.alloydb.v1beta.EncryptionInfo DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.alloydb.v1beta.EncryptionInfo();
}
public static com.google.cloud.alloydb.v1beta.EncryptionInfo getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Parser used by all parseFrom overloads: delegates to Builder.mergeFrom and
// attaches the partially built message to any parse exception so callers can
// inspect what was decoded before the failure.
private static final com.google.protobuf.Parser<EncryptionInfo> PARSER =
new com.google.protobuf.AbstractParser<EncryptionInfo>() {
@java.lang.Override
public EncryptionInfo parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<EncryptionInfo> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<EncryptionInfo> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.alloydb.v1beta.EncryptionInfo getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
openjdk/nashorn | 35,245 | src/org.openjdk.nashorn/share/classes/org/openjdk/nashorn/internal/codegen/ObjectClassGenerator.java | /*
* Copyright (c) 2010, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package org.openjdk.nashorn.internal.codegen;
import static org.openjdk.nashorn.internal.codegen.Compiler.SCRIPTS_PACKAGE;
import static org.openjdk.nashorn.internal.codegen.CompilerConstants.ALLOCATE;
import static org.openjdk.nashorn.internal.codegen.CompilerConstants.INIT_ARGUMENTS;
import static org.openjdk.nashorn.internal.codegen.CompilerConstants.INIT_MAP;
import static org.openjdk.nashorn.internal.codegen.CompilerConstants.INIT_SCOPE;
import static org.openjdk.nashorn.internal.codegen.CompilerConstants.JAVA_THIS;
import static org.openjdk.nashorn.internal.codegen.CompilerConstants.JS_OBJECT_DUAL_FIELD_PREFIX;
import static org.openjdk.nashorn.internal.codegen.CompilerConstants.JS_OBJECT_SINGLE_FIELD_PREFIX;
import static org.openjdk.nashorn.internal.codegen.CompilerConstants.className;
import static org.openjdk.nashorn.internal.codegen.CompilerConstants.constructorNoLookup;
import static org.openjdk.nashorn.internal.lookup.Lookup.MH;
import static org.openjdk.nashorn.internal.runtime.JSType.CONVERT_OBJECT;
import static org.openjdk.nashorn.internal.runtime.JSType.CONVERT_OBJECT_OPTIMISTIC;
import static org.openjdk.nashorn.internal.runtime.JSType.GET_UNDEFINED;
import static org.openjdk.nashorn.internal.runtime.JSType.TYPE_DOUBLE_INDEX;
import static org.openjdk.nashorn.internal.runtime.JSType.TYPE_INT_INDEX;
import static org.openjdk.nashorn.internal.runtime.JSType.TYPE_OBJECT_INDEX;
import static org.openjdk.nashorn.internal.runtime.JSType.TYPE_UNDEFINED_INDEX;
import static org.openjdk.nashorn.internal.runtime.JSType.getAccessorTypeIndex;
import static org.openjdk.nashorn.internal.runtime.UnwarrantedOptimismException.isValid;
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.MethodType;
import java.util.EnumSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import org.openjdk.nashorn.internal.codegen.ClassEmitter.Flag;
import org.openjdk.nashorn.internal.codegen.types.Type;
import org.openjdk.nashorn.internal.runtime.AccessorProperty;
import org.openjdk.nashorn.internal.runtime.AllocationStrategy;
import org.openjdk.nashorn.internal.runtime.Context;
import org.openjdk.nashorn.internal.runtime.FunctionScope;
import org.openjdk.nashorn.internal.runtime.JSType;
import org.openjdk.nashorn.internal.runtime.PropertyMap;
import org.openjdk.nashorn.internal.runtime.ScriptEnvironment;
import org.openjdk.nashorn.internal.runtime.ScriptObject;
import org.openjdk.nashorn.internal.runtime.Undefined;
import org.openjdk.nashorn.internal.runtime.UnwarrantedOptimismException;
import org.openjdk.nashorn.internal.runtime.logging.DebugLogger;
import org.openjdk.nashorn.internal.runtime.logging.Loggable;
import org.openjdk.nashorn.internal.runtime.logging.Logger;
/**
* Generates the ScriptObject subclass structure with fields for a user objects.
*/
@Logger(name="fields")
public final class ObjectClassGenerator implements Loggable {
/**
 * Type guard to make sure we don't unnecessarily explode field storages. Rather unbox e.g.
 * a java.lang.Number than blow up the field. Gradually, optimistic types should create almost
 * no boxed types
 */
private static final MethodHandle IS_TYPE_GUARD = findOwnMH("isType", boolean.class, Class.class, Object.class);
/**
 * Marker for scope parameters
 */
private static final String SCOPE_MARKER = "P";
/**
 * Minimum number of extra fields in an object.
 */
static final int FIELD_PADDING = 4;
/**
 * Debug field logger
 * Should we print debugging information for fields when they are generated and getters/setters are called?
 */
private final DebugLogger log;
/** Field types for object-only fields */
private static final Type[] FIELD_TYPES_OBJECT = new Type[] { Type.OBJECT };
/** Field types for dual primitive/object fields */
private static final Type[] FIELD_TYPES_DUAL = new Type[] { Type.LONG, Type.OBJECT };
/** What type is the primitive type in dual representation */
public static final Type PRIMITIVE_FIELD_TYPE = Type.LONG;
// Handle to getDifferent(...) — presumably the slow-path getter used when the
// requested type differs from the stored one; TODO confirm against the
// getDifferent definition later in this file.
private static final MethodHandle GET_DIFFERENT = findOwnMH("getDifferent", Object.class, Object.class, Class.class, MethodHandle.class, MethodHandle.class, int.class);
// Handle to getDifferentUndefined(int) — see definition later in this file.
private static final MethodHandle GET_DIFFERENT_UNDEFINED = findOwnMH("getDifferentUndefined", Object.class, int.class);
// One-shot flag for the deprecation warning in the constructor.
// NOTE(review): read/written without synchronization — benign race; worst case
// the warning is logged more than once.
private static boolean initialized = false;
/** The context */
private final Context context;
// Whether generated structure classes use the dual (long + Object) field layout.
private final boolean dualFields;
/**
* Constructor
*
* @param context a context
* @param dualFields whether to use dual fields representation
*/
public ObjectClassGenerator(final Context context, final boolean dualFields) {
this.context = context;
this.dualFields = dualFields;
assert context != null;
this.log = initLogger(context);
// One-time (per JVM, not per Context) deprecation warning for the
// single-field layout; see the note on the static 'initialized' flag.
if (!initialized) {
initialized = true;
if (!dualFields) {
log.warning("Running with object fields only - this is a deprecated configuration.");
}
}
}
// Loggable implementation: expose the logger created in the constructor.
@Override
public DebugLogger getLogger() {
return log;
}
// Loggable implementation: obtain the "fields" logger (see the @Logger
// annotation on this class) from the given context.
@Override
public DebugLogger initLogger(final Context ctxt) {
return ctxt.getLogger(this.getClass());
}
/**
* Pack a number into a primitive long field
* @param n number object
* @return primitive long value with all the bits in the number
*/
public static long pack(final Number n) {
    if (n instanceof Integer) {
        return n.intValue();
    } else if (n instanceof Long) {
        return n.longValue();
    } else if (n instanceof Double) {
        // Raw bits so NaN payloads and -0.0 survive the round trip.
        return Double.doubleToRawLongBits(n.doubleValue());
    }
    // Fixed error message: was "cannot pack" + n with no separating space,
    // which produced e.g. "cannot pack3.0".
    throw new AssertionError("cannot pack " + n);
}
/**
 * Resolve the structure-class name prefix for the chosen field representation.
 *
 * @param dualFields true for the dual (primitive + object) field layout
 * @return the class-name prefix for that layout
 */
private static String getPrefixName(final boolean dualFields) {
    if (dualFields) {
        return JS_OBJECT_DUAL_FIELD_PREFIX.symbolName();
    }
    return JS_OBJECT_SINGLE_FIELD_PREFIX.symbolName();
}
/**
 * Recover the layout prefix from a structure class name.
 *
 * @param className simple name of a structure class
 * @return the matching layout prefix
 * @throws AssertionError if the name starts with neither known prefix
 */
private static String getPrefixName(final String className) {
    final boolean isDual = className.startsWith(JS_OBJECT_DUAL_FIELD_PREFIX.symbolName());
    if (!isDual && !className.startsWith(JS_OBJECT_SINGLE_FIELD_PREFIX.symbolName())) {
        throw new AssertionError("Not a structure class: " + className);
    }
    return getPrefixName(isDual);
}
/**
* Returns the class name for JavaScript objects with fieldCount fields.
*
* @param fieldCount Number of fields to allocate.
* @param dualFields whether to use dual fields representation
* @return The class name.
*/
public static String getClassName(final int fieldCount, final boolean dualFields) {
    // Zero-field structures use the bare prefix; otherwise the count is appended.
    final StringBuilder name = new StringBuilder(SCRIPTS_PACKAGE).append('/').append(getPrefixName(dualFields));
    if (fieldCount != 0) {
        name.append(fieldCount);
    }
    return name.toString();
}
/**
* Returns the class name for JavaScript scope with fieldCount fields and
* paramCount parameters.
*
* @param fieldCount Number of fields to allocate.
* @param paramCount Number of parameters to allocate
* @param dualFields whether to use dual fields representation
* @return The class name.
*/
public static String getClassName(final int fieldCount, final int paramCount, final boolean dualFields) {
    // Scope classes encode both counts: <prefix><fields>P<params>.
    final String prefix = getPrefixName(dualFields);
    return SCRIPTS_PACKAGE + '/' + prefix + fieldCount + SCOPE_MARKER + paramCount;
}
/**
* Returns the number of fields in the JavaScript scope class. Its name had to be generated using either
* {@link #getClassName(int, boolean)} or {@link #getClassName(int, int, boolean)}.
* @param clazz the JavaScript scope class.
* @return the number of fields in the scope class.
*/
public static int getFieldCount(final Class<?> clazz) {
    final String simpleName = clazz.getSimpleName();
    final String prefix = getPrefixName(simpleName);
    if (simpleName.equals(prefix)) {
        // A bare prefix is the zero-field structure class.
        return 0;
    }
    // The field count sits between the prefix and the optional scope marker.
    final int markerPos = simpleName.indexOf(SCOPE_MARKER);
    final String count = markerPos < 0
            ? simpleName.substring(prefix.length())
            : simpleName.substring(prefix.length(), markerPos);
    return Integer.parseInt(count);
}
/**
 * Returns the name of a generated field based on ordinal and type: the first
 * character of the type's descriptor followed by the field index.
 *
 * @param fieldIndex Ordinal of field.
 * @param type Type of field.
 * @return The field name.
 */
public static String getFieldName(final int fieldIndex, final Type type) {
    return type.getDescriptor().charAt(0) + Integer.toString(fieldIndex);
}
/**
 * In the object-fields-only world there is no undefined SwitchPoint, to reduce as much
 * potential MethodHandle overhead as possible. In that case we explicitly need to
 * assign undefined to object fields when they are initialized.
 *
 * @param init constructor to generate code in
 * @param className name of class
 * @param fieldNames fields to initialize to undefined, where applicable
 */
private void initializeToUndefined(final MethodEmitter init, final String className, final List<String> fieldNames) {
    // Dual fields need no initialization: they have a constant getter for
    // undefined for any unknown type.
    if (dualFields || fieldNames.isEmpty()) {
        return;
    }
    // Push "this" and undefined once, then store into every field in turn,
    // duplicating the pair on the stack for all stores except the last.
    init.load(Type.OBJECT, JAVA_THIS.slot());
    init.loadUndefined(Type.OBJECT);
    final int lastIndex = fieldNames.size() - 1;
    int index = 0;
    for (final String fieldName : fieldNames) {
        if (index++ < lastIndex) {
            init.dup2();
        }
        init.putField(className, fieldName, Type.OBJECT.getDescriptor());
    }
}
/**
 * Generates the byte code for a JavaScript object class or scope. The class name
 * encodes the number of fields and, for scopes, the number of parameter fields.
 *
 * @param descriptor Descriptor pulled from class name.
 * @return Byte codes for generated class.
 */
public byte[] generate(final String descriptor) {
    // Descriptor is "<fieldCount>" or "<fieldCount><SCOPE_MARKER><paramCount>".
    final String[] counts = descriptor.split(SCOPE_MARKER);
    final int fieldCount = Integer.parseInt(counts[0]);
    return counts.length == 1
            ? generate(fieldCount)
            : generate(fieldCount, Integer.parseInt(counts[1]));
}
/**
 * Generates the byte code for a JavaScript object class with {@code fieldCount} fields.
 *
 * @param fieldCount Number of fields in the JavaScript object.
 * @return Byte codes for generated class.
 */
public byte[] generate(final int fieldCount) {
    final String className = getClassName(fieldCount, dualFields);
    final ClassEmitter classEmitter = newClassEmitter(className, className(ScriptObject.class));
    addFields(classEmitter, fieldCount);

    // Constructor taking just the property map.
    final MethodEmitter ctor = newInitMethod(classEmitter);
    ctor.returnVoid();
    ctor.end();

    // Constructor taking pre-populated spill arrays.
    final MethodEmitter spillCtor = newInitWithSpillArraysMethod(classEmitter, ScriptObject.class);
    spillCtor.returnVoid();
    spillCtor.end();

    newEmptyInit(className, classEmitter);
    newAllocate(className, classEmitter);
    return toByteArray(className, classEmitter);
}
/**
 * Generates the byte code for a JavaScript scope class with {@code fieldCount}
 * fields and {@code paramCount} parameters.
 *
 * @param fieldCount Number of fields in the JavaScript scope.
 * @param paramCount Number of parameters in the JavaScript scope.
 * @return Byte codes for generated class.
 */
public byte[] generate(final int fieldCount, final int paramCount) {
    final String className = getClassName(fieldCount, paramCount, dualFields);
    final ClassEmitter classEmitter = newClassEmitter(className, className(FunctionScope.class));
    final List<String> initFields = addFields(classEmitter, fieldCount);

    // Constructor taking the property map and enclosing scope.
    final MethodEmitter scopeCtor = newInitScopeMethod(classEmitter);
    initializeToUndefined(scopeCtor, className, initFields);
    scopeCtor.returnVoid();
    scopeCtor.end();

    // Constructor taking pre-populated spill arrays.
    final MethodEmitter spillCtor = newInitWithSpillArraysMethod(classEmitter, FunctionScope.class);
    initializeToUndefined(spillCtor, className, initFields);
    spillCtor.returnVoid();
    spillCtor.end();

    // Constructor additionally taking the arguments object.
    final MethodEmitter argsCtor = newInitScopeWithArgumentsMethod(classEmitter);
    initializeToUndefined(argsCtor, className, initFields);
    argsCtor.returnVoid();
    argsCtor.end();

    return toByteArray(className, classEmitter);
}
/**
 * Generates the needed fields on the class being emitted.
 *
 * @param classEmitter Open class emitter.
 * @param fieldCount Number of fields.
 * @return List of fields that need to be initialized (the Object-typed ones).
 */
private List<String> addFields(final ClassEmitter classEmitter, final int fieldCount) {
    final Type[] fieldTypes = dualFields ? FIELD_TYPES_DUAL : FIELD_TYPES_OBJECT;
    final List<String> initFields = new LinkedList<>();
    for (int fieldIndex = 0; fieldIndex < fieldCount; fieldIndex++) {
        for (final Type fieldType : fieldTypes) {
            final String fieldName = getFieldName(fieldIndex, fieldType);
            classEmitter.field(fieldName, fieldType.getTypeClass());
            // Only Object fields may later need explicit initialization to undefined.
            if (fieldType == Type.OBJECT) {
                initFields.add(fieldName);
            }
        }
    }
    return initFields;
}
/**
 * Allocates and begins a new class emitter.
 *
 * @param className Name of JavaScript class.
 * @param superName Internal name of the superclass.
 * @return Open class emitter.
 */
private ClassEmitter newClassEmitter(final String className, final String superName) {
    final ClassEmitter emitter = new ClassEmitter(context, className, superName);
    emitter.begin();
    return emitter;
}
/**
 * Allocates and begins a new {@code <init>(PropertyMap)} constructor emitter.
 *
 * @param classEmitter Open class emitter.
 * @return Open method emitter, positioned after the super constructor call.
 */
private static MethodEmitter newInitMethod(final ClassEmitter classEmitter) {
    final MethodEmitter ctor = classEmitter.init(PropertyMap.class);
    ctor.begin();
    // super(map)
    ctor.load(Type.OBJECT, JAVA_THIS.slot());
    ctor.load(Type.OBJECT, INIT_MAP.slot());
    ctor.invoke(constructorNoLookup(ScriptObject.class, PropertyMap.class));
    return ctor;
}
/**
 * Allocates and begins a new {@code <init>(PropertyMap, long[], Object[])} constructor
 * emitter that delegates to the given superclass.
 *
 * @param classEmitter Open class emitter.
 * @param superClass superclass whose matching constructor is invoked.
 * @return Open method emitter, positioned after the super constructor call.
 */
private static MethodEmitter newInitWithSpillArraysMethod(final ClassEmitter classEmitter, final Class<?> superClass) {
    final MethodEmitter ctor = classEmitter.init(PropertyMap.class, long[].class, Object[].class);
    ctor.begin();
    // super(map, primitiveSpill, objectSpill) -- the spill arrays live in local slots 2 and 3
    ctor.load(Type.OBJECT, JAVA_THIS.slot());
    ctor.load(Type.OBJECT, INIT_MAP.slot());
    ctor.load(Type.LONG_ARRAY, 2);
    ctor.load(Type.OBJECT_ARRAY, 3);
    ctor.invoke(constructorNoLookup(superClass, PropertyMap.class, long[].class, Object[].class));
    return ctor;
}
/**
 * Allocates and begins a new {@code <init>(PropertyMap, ScriptObject)} constructor
 * emitter for scopes.
 *
 * @param classEmitter Open class emitter.
 * @return Open method emitter, positioned after the super constructor call.
 */
private static MethodEmitter newInitScopeMethod(final ClassEmitter classEmitter) {
    final MethodEmitter ctor = classEmitter.init(PropertyMap.class, ScriptObject.class);
    ctor.begin();
    // super(map, scope)
    ctor.load(Type.OBJECT, JAVA_THIS.slot());
    ctor.load(Type.OBJECT, INIT_MAP.slot());
    ctor.load(Type.OBJECT, INIT_SCOPE.slot());
    ctor.invoke(constructorNoLookup(FunctionScope.class, PropertyMap.class, ScriptObject.class));
    return ctor;
}
/**
 * Allocates and begins a new {@code <init>(PropertyMap, ScriptObject, ScriptObject)}
 * constructor emitter for scopes with an arguments object.
 *
 * @param classEmitter Open class emitter.
 * @return Open method emitter, positioned after the super constructor call.
 */
private static MethodEmitter newInitScopeWithArgumentsMethod(final ClassEmitter classEmitter) {
    final MethodEmitter ctor = classEmitter.init(PropertyMap.class, ScriptObject.class, ScriptObject.class);
    ctor.begin();
    // super(map, scope, arguments)
    ctor.load(Type.OBJECT, JAVA_THIS.slot());
    ctor.load(Type.OBJECT, INIT_MAP.slot());
    ctor.load(Type.OBJECT, INIT_SCOPE.slot());
    ctor.load(Type.OBJECT, INIT_ARGUMENTS.slot());
    ctor.invoke(constructorNoLookup(FunctionScope.class, PropertyMap.class, ScriptObject.class, ScriptObject.class));
    return ctor;
}
/**
 * Adds an empty no-arg {@code <init>} to the JavaScript class that delegates to
 * {@code <init>(PropertyMap)} with a null map.
 *
 * @param className Name of JavaScript class.
 * @param classEmitter Open class emitter.
 */
private static void newEmptyInit(final String className, final ClassEmitter classEmitter) {
    final MethodEmitter ctor = classEmitter.init();
    ctor.begin();
    // this.<init>((PropertyMap) null)
    ctor.load(Type.OBJECT, JAVA_THIS.slot());
    ctor.loadNull();
    ctor.invoke(constructorNoLookup(className, PropertyMap.class));
    ctor.returnVoid();
    ctor.end();
}
/**
 * Adds a public static allocator method to the JavaScript class that constructs a
 * new instance from a property map.
 *
 * @param className Name of JavaScript class.
 * @param classEmitter Open class emitter.
 */
private static void newAllocate(final String className, final ClassEmitter classEmitter) {
    final MethodEmitter factory = classEmitter.method(EnumSet.of(Flag.PUBLIC, Flag.STATIC), ALLOCATE.symbolName(), ScriptObject.class, PropertyMap.class);
    factory.begin();
    // return new <className>(map)
    factory._new(className, Type.typeFor(ScriptObject.class));
    factory.dup();
    factory.load(Type.typeFor(PropertyMap.class), 0);
    factory.invoke(constructorNoLookup(className, PropertyMap.class));
    factory._return();
    factory.end();
}
/**
 * Closes the class emitter and collects the generated byte code, dumping and
 * verifying it as directed by the script environment.
 *
 * @param className Name of the generated class, used when dumping byte code.
 * @param classEmitter Open class emitter.
 * @return Byte codes for the class.
 */
private byte[] toByteArray(final String className, final ClassEmitter classEmitter) {
    classEmitter.end();
    final byte[] byteCode = classEmitter.toByteArray();
    final ScriptEnvironment env = context.getEnv();
    DumpBytecode.dumpBytecode(env, log, byteCode, className);
    if (env._verify_code) {
        context.verify(byteCode);
    }
    return byteCode;
}
/**
 * Packs a primitive double into raw long bits ({@code Double.doubleToRawLongBits}),
 * used with --dual-fields to store primitive double values in long fields.
 */
public static final MethodHandle PACK_DOUBLE =
    MH.explicitCastArguments(MH.findStatic(MethodHandles.publicLookup(), Double.class, "doubleToRawLongBits", MH.type(long.class, double.class)), MH.type(long.class, double.class));

/**
 * Unpacks raw long bits back into a primitive double ({@code Double.longBitsToDouble}),
 * used with --dual-fields for primitive double values.
 */
public static final MethodHandle UNPACK_DOUBLE =
    MH.findStatic(MethodHandles.publicLookup(), Double.class, "longBitsToDouble", MH.type(double.class, long.class));
// type != forType, so use the correct getter for forType, box the stored value and
// throw an UnwarrantedOptimismException carrying it, so the caller can deoptimize.
// Referenced via a MethodHandle (presumably bound to GET_DIFFERENT, used in
// createGetterInner) -- hence the "unused" suppression.
@SuppressWarnings("unused")
private static Object getDifferent(final Object receiver, final Class<?> forType, final MethodHandle primitiveGetter, final MethodHandle objectGetter, final int programPoint) {
    //create the sametype getter, and upcast to value. no matter what the store format is,
    //
    final MethodHandle sameTypeGetter = getterForType(forType, primitiveGetter, objectGetter);
    final MethodHandle mh = MH.asType(sameTypeGetter, sameTypeGetter.type().changeReturnType(Object.class));
    try {
        // Read the currently stored (boxed) value and report it to the optimistic caller.
        final Object value = mh.invokeExact(receiver);
        throw new UnwarrantedOptimismException(value, programPoint);
    } catch (final Error | RuntimeException e) {
        // Includes the UnwarrantedOptimismException thrown above -- rethrow unchanged.
        throw e;
    } catch (final Throwable e) {
        // invokeExact declares Throwable; a checked exception here is unexpected.
        throw new RuntimeException(e);
    }
}
// Getter used when the field is still in the "undefined" representation and a
// typed value was optimistically requested: always deoptimize. Referenced via a
// MethodHandle (presumably bound to GET_DIFFERENT_UNDEFINED, used in
// createGetterInner) -- hence the "unused" suppression.
@SuppressWarnings("unused")
private static Object getDifferentUndefined(final int programPoint) {
    throw new UnwarrantedOptimismException(Undefined.getUndefined(), programPoint);
}
/**
 * Picks the raw getter matching the storage representation {@code forType}:
 * ints read the primitive field cast to int, doubles read the primitive field and
 * unpack its long bits, objects use the object getter as-is.
 */
private static MethodHandle getterForType(final Class<?> forType, final MethodHandle primitiveGetter, final MethodHandle objectGetter) {
    final int typeIndex = getAccessorTypeIndex(forType);
    if (typeIndex == TYPE_INT_INDEX) {
        return MH.explicitCastArguments(primitiveGetter, primitiveGetter.type().changeReturnType(int.class));
    }
    if (typeIndex == TYPE_DOUBLE_INDEX) {
        return MH.filterReturnValue(primitiveGetter, UNPACK_DOUBLE);
    }
    if (typeIndex == TYPE_OBJECT_INDEX) {
        return objectGetter;
    }
    throw new AssertionError(forType);
}
/**
 * Shared implementation behind {@code createGetter}: builds a getter MethodHandle for a
 * field stored as {@code forType} (null meaning the "undefined" representation) that
 * returns {@code type}. Whether the getter is optimistic is decided by which converter
 * list is passed: {@code CONVERT_OBJECT_OPTIMISTIC} selects the optimistic variant that
 * throws UnwarrantedOptimismException on unwarranted narrowing; otherwise conversion is
 * unconditional.
 */
private static MethodHandle createGetterInner(final Class<?> forType, final Class<?> type, final MethodHandle primitiveGetter, final MethodHandle objectGetter, final List<MethodHandle> converters, final int programPoint) {
    final int fti = forType == null ? TYPE_UNDEFINED_INDEX : getAccessorTypeIndex(forType);
    final int ti = getAccessorTypeIndex(type);
    //this means fail if forType != type
    final boolean isOptimistic = converters == CONVERT_OBJECT_OPTIMISTIC;
    final boolean isPrimitiveStorage = forType != null && forType.isPrimitive();
    //which is the primordial getter
    final MethodHandle getter = primitiveGetter == null ? objectGetter : isPrimitiveStorage ? primitiveGetter : objectGetter;
    // Field is still in "undefined" representation -- nothing has been stored yet.
    if (forType == null) {
        if (isOptimistic) {
            //return undefined if asking for object. otherwise throw UnwarrantedOptimismException
            if (ti == TYPE_OBJECT_INDEX) {
                return MH.dropArguments(GET_UNDEFINED.get(TYPE_OBJECT_INDEX), 0, Object.class);
            }
            //throw exception
            return MH.asType(
                MH.dropArguments(
                    MH.insertArguments(
                        GET_DIFFERENT_UNDEFINED,
                        0,
                        programPoint),
                    0,
                    Object.class),
                getter.type().changeReturnType(type));
        }
        //return an undefined and coerce it to the appropriate type
        return MH.dropArguments(GET_UNDEFINED.get(ti), 0, Object.class);
    }
    assert primitiveGetter != null || forType == Object.class : forType;
    if (isOptimistic) {
        if (fti < ti) {
            //asking for a wider type than currently stored. then it's OK to coerce.
            //e.g. stored as int, ask for long or double
            //e.g. stored as long, ask for double
            assert fti != TYPE_UNDEFINED_INDEX;
            final MethodHandle tgetter = getterForType(forType, primitiveGetter, objectGetter);
            return MH.asType(tgetter, tgetter.type().changeReturnType(type));
        } else if (fti == ti) {
            //Fast path, never throw exception - exact getter, just unpack if needed
            return getterForType(forType, primitiveGetter, objectGetter);
        } else {
            assert fti > ti;
            //if asking for a narrower type than the storage - throw exception
            //unless FTI is object, in that case we have to go through the converters
            //there is no
            if (fti == TYPE_OBJECT_INDEX) {
                return MH.filterReturnValue(
                    objectGetter,
                    MH.insertArguments(
                        converters.get(ti),
                        1,
                        programPoint));
            }
            //asking for narrower primitive than we have stored, that is an
            //UnwarrantedOptimismException
            return MH.asType(
                MH.filterArguments(
                    objectGetter,
                    0,
                    MH.insertArguments(
                        GET_DIFFERENT,
                        1,
                        forType,
                        primitiveGetter,
                        objectGetter,
                        programPoint)),
                objectGetter.type().changeReturnType(type));
        }
    }
    assert !isOptimistic;
    // freely coerce the result to whatever you asked for, this is e.g. Object->int for a & b
    final MethodHandle tgetter = getterForType(forType, primitiveGetter, objectGetter);
    if (fti == TYPE_OBJECT_INDEX) {
        if (fti != ti) {
            return MH.filterReturnValue(tgetter, CONVERT_OBJECT.get(ti));
        }
        return tgetter;
    }
    assert primitiveGetter != null;
    final MethodType tgetterType = tgetter.type();
    switch (fti) {
    case TYPE_INT_INDEX: {
        // int storage widens to any requested type via asType.
        return MH.asType(tgetter, tgetterType.changeReturnType(type));
    }
    case TYPE_DOUBLE_INDEX:
        switch (ti) {
        case TYPE_INT_INDEX:
            return MH.filterReturnValue(tgetter, JSType.TO_INT32_D.methodHandle);
        case TYPE_DOUBLE_INDEX:
            assert tgetterType.returnType() == double.class;
            return tgetter;
        default:
            return MH.asType(tgetter, tgetterType.changeReturnType(Object.class));
        }
    default:
        throw new UnsupportedOperationException(forType + "=>" + type);
    }
}
/**
 * Given a primitiveGetter (optional for non dual fields) and an objectGetter that retrieve
 * the primitive and object version of a field respectively, return one with the correct
 * method type and the correct filters. For example, if the value is stored as a double
 * and we want an Object getter, in the dual fields world we'd pick the primitiveGetter,
 * which reads a long, use longBitsToDouble on the result to unpack it, and then change the
 * return type to Object, boxing it. In the objects only world there are only object fields,
 * primitives are boxed when asked for them and we don't need to bother with primitive encoding
 * (or even undefined, which is forType==null) representation, so we just return whatever is
 * in the object field. The object field is always initiated to Undefined, so here, where we have
 * the representation for Undefined in all our bits, this is not a problem.
 * <p>
 * Representing undefined in a primitive is hard, for an int there aren't enough bits, for a long
 * we could limit the width of a representation, and for a double (as long as it is stored as long,
 * as all NaNs will turn into QNaN on ia32, which is one bit pattern, we should use a special NaN).
 * Naturally we could have special undefined values for all types which mean "go look in a wider field",
 * but the guards needed on every getter took too much time.
 * <p>
 * To see how this is used, look for example in {@link AccessorProperty#getGetter}
 * <p>
 * @param forType representation of the underlying type in the field, null if undefined
 * @param type type to retrieve it as
 * @param primitiveGetter getter to read the primitive version of this field (null if Objects Only)
 * @param objectGetter getter to read the object version of this field
 * @param programPoint program point for getter, if program point is INVALID_PROGRAM_POINT, then this is not an optimistic getter
 *
 * @return getter for the given representation that returns the given type
 */
public static MethodHandle createGetter(final Class<?> forType, final Class<?> type, final MethodHandle primitiveGetter, final MethodHandle objectGetter, final int programPoint) {
    // A valid program point selects the optimistic converter list, which makes
    // createGetterInner produce deoptimizing (exception-throwing) getters.
    return createGetterInner(
        forType,
        type,
        primitiveGetter,
        objectGetter,
        isValid(programPoint) ? CONVERT_OBJECT_OPTIMISTIC : CONVERT_OBJECT,
        programPoint);
}
/**
 * This is similar to the {@link ObjectClassGenerator#createGetter} function. Performs
 * the necessary operations to massage a setter operand of type {@code type} to
 * fit into the primitive field (if primitive and dual fields is enabled) or into
 * the object field (box if primitive and dual fields is disabled)
 *
 * @param forType representation of the underlying object
 * @param type representation of field to write, and setter signature
 * @param primitiveSetter setter that writes to the primitive field (null if Objects Only)
 * @param objectSetter setter that writes to the object field
 *
 * @return the setter for the given representation that takes a {@code type}
 */
public static MethodHandle createSetter(final Class<?> forType, final Class<?> type, final MethodHandle primitiveSetter, final MethodHandle objectSetter) {
    assert forType != null;
    final int fti = getAccessorTypeIndex(forType);
    final int ti = getAccessorTypeIndex(type);
    // Object storage (or no primitive field at all): always write through the
    // object setter, adapting the value parameter type if needed.
    if (fti == TYPE_OBJECT_INDEX || primitiveSetter == null) {
        if (ti == TYPE_OBJECT_INDEX) {
            return objectSetter;
        }
        return MH.asType(objectSetter, objectSetter.type().changeParameterType(1, type));
    }
    final MethodType pmt = primitiveSetter.type();
    switch (fti) {
    case TYPE_INT_INDEX:
        switch (ti) {
        case TYPE_INT_INDEX:
            // int into int storage: direct primitive write.
            return MH.asType(primitiveSetter, pmt.changeParameterType(1, int.class));
        case TYPE_DOUBLE_INDEX:
            // double into the long-backed primitive field: pack to raw long bits.
            return MH.filterArguments(primitiveSetter, 1, PACK_DOUBLE);
        default:
            // Objects can't fit the primitive representation -- fall back to the object field.
            return objectSetter;
        }
    case TYPE_DOUBLE_INDEX:
        if (ti == TYPE_OBJECT_INDEX) {
            return objectSetter;
        }
        // Any primitive into double storage: pack to long bits, adapt the parameter type.
        return MH.asType(MH.filterArguments(primitiveSetter, 1, PACK_DOUBLE), pmt.changeParameterType(1, type));
    default:
        throw new UnsupportedOperationException(forType + "=>" + type);
    }
}
/**
 * Checks that {@code x} is a non-null instance of exactly {@code boxedForType}
 * (no widening, no subclass matching). Referenced via a MethodHandle (presumably
 * IS_TYPE_GUARD, see createGuardBoxedPrimitiveSetter) -- hence the "unused" suppression.
 */
@SuppressWarnings("unused")
private static boolean isType(final Class<?> boxedForType, final Object x) {
    if (x == null) {
        return false;
    }
    return x.getClass() == boxedForType;
}
/**
 * Returns the wrapper class for a supported primitive field type.
 *
 * @param forType primitive type: {@code int}, {@code long} or {@code double}
 * @return the corresponding boxed type
 * @throws AssertionError if {@code forType} is not one of the supported primitives
 */
private static Class<? extends Number> getBoxedType(final Class<?> forType) {
    if (forType == int.class) {
        return Integer.class;
    }
    if (forType == long.class) {
        return Long.class;
    }
    if (forType == double.class) {
        return Double.class;
    }
    // Fail fast with context instead of "assert false; return null", which would
    // silently return null when assertions are disabled. This matches the style
    // of getterForType's unreachable default case.
    throw new AssertionError(forType);
}
/**
 * If we are setting boxed types (because the compiler couldn't determine which they were) to
 * a primitive field, we can reuse the primitive field setter, as long as we are setting an element
 * of the same boxed type as the primitive type representation
 *
 * @param forType the current type
 * @param primitiveSetter primitive setter for the current type with an element of the current type
 * @param objectSetter the object setter
 *
 * @return method handle that checks if the element to be set is of the current type, even though it's boxed
 *  and instead of using the generic object setter, that would blow up the type and invalidate the map,
 *  unbox it and call the primitive setter instead
 */
public static MethodHandle createGuardBoxedPrimitiveSetter(final Class<?> forType, final MethodHandle primitiveSetter, final MethodHandle objectSetter) {
    final Class<? extends Number> boxedForType = getBoxedType(forType);
    //object setter that checks for primitive if current type is primitive
    return MH.guardWithTest(
        // test(receiver, value) == isType(boxedForType, value): the receiver
        // argument is dropped and the boxed class is bound in up front.
        MH.insertArguments(
            MH.dropArguments(
                IS_TYPE_GUARD,
                1,
                Object.class),
            0,
            boxedForType),
        // Exact boxed match: reuse the primitive setter, adapted to the object signature.
        MH.asType(
            primitiveSetter,
            objectSetter.type()),
        // Otherwise fall back to the generic object setter.
        objectSetter);
}
/**
 * Adds padding to a field count to avoid creating too many distinct classes and
 * to leave some spare fields. For non-negative counts this yields the smallest
 * multiple of {@code FIELD_PADDING} strictly greater than {@code count}.
 *
 * @param count the field count
 * @return the padded field count
 */
static int getPaddedFieldCount(final int count) {
    return (count / FIELD_PADDING + 1) * FIELD_PADDING;
}
/**
 * Looks up a static method declared in this class as a MethodHandle.
 *
 * @param name method name
 * @param rtype return type
 * @param types parameter types
 * @return method handle for the named static method
 */
private static MethodHandle findOwnMH(final String name, final Class<?> rtype, final Class<?>... types) {
    return MH.findStatic(MethodHandles.lookup(), ObjectClassGenerator.class, name, MH.type(rtype, types));
}
/**
 * Creates the allocation strategy (allocator class name plus property map) for a
 * constructor function with the specified number of "this" properties, padding the
 * field count to limit the number of generated classes.
 *
 * @param thisProperties number of properties assigned to "this"
 * @param dualFields whether to use the dual fields representation
 * @return the allocation strategy
 */
static AllocationStrategy createAllocationStrategy(final int thisProperties, final boolean dualFields) {
    return new AllocationStrategy(getPaddedFieldCount(thisProperties), dualFields);
}
}
|
apache/tinkerpop | 35,132 | gremlin-core/src/test/java/org/apache/tinkerpop/gremlin/util/NumberHelperTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.tinkerpop.gremlin.util;
import org.apache.tinkerpop.gremlin.process.traversal.N;
import org.javatuples.Quartet;
import org.javatuples.Triplet;
import org.junit.Test;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.math.RoundingMode;
import java.util.Arrays;
import java.util.List;
import static org.apache.tinkerpop.gremlin.util.NumberHelper.add;
import static org.apache.tinkerpop.gremlin.util.NumberHelper.compare;
import static org.apache.tinkerpop.gremlin.util.NumberHelper.div;
import static org.apache.tinkerpop.gremlin.util.NumberHelper.getHighestCommonNumberClass;
import static org.apache.tinkerpop.gremlin.util.NumberHelper.getHighestCommonNumberInfo;
import static org.apache.tinkerpop.gremlin.util.NumberHelper.max;
import static org.apache.tinkerpop.gremlin.util.NumberHelper.min;
import static org.apache.tinkerpop.gremlin.util.NumberHelper.mul;
import static org.apache.tinkerpop.gremlin.util.NumberHelper.sub;
import static org.apache.tinkerpop.gremlin.util.NumberHelper.NumberInfo;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
* @author Daniel Kuppitz (http://gremlin.guru)
*/
public class NumberHelperTest {
// One representative value (equal to 1) of each Number type exercised by these tests.
private final static List<Number> EACH_NUMBER_TYPE = Arrays.asList(
    (byte) 1, (short) 1, 1, 1L, 1F, 1D, BigInteger.ONE, BigDecimal.ONE
);
// (value, expected bit width, expected floating-point flag) triplets for
// getHighestCommonNumberInfo; BigInteger/BigDecimal are expected as 128-bit.
private final static List<Triplet<Number, Integer, Boolean>> NUMBER_INFO_CASES = Arrays.asList(
    new Triplet<>((byte)1, 8, false),
    new Triplet<>((short)1, 16, false),
    new Triplet<>(1, 32, false),
    new Triplet<>(1L, 64, false),
    new Triplet<>(BigInteger.ONE, 128, false),
    new Triplet<>(1F, 32, true),
    new Triplet<>(1D, 64, true),
    new Triplet<>(BigDecimal.ONE, 128, true)
);
// (a, b, expected common class, expected common class with the boolean flag set --
// presumably "force floating point"; confirm against NumberHelper) quartets for
// getHighestCommonNumberClass. shouldReturnHighestCommonNumberClass checks both
// argument orders, so each pair is listed only once.
private final static List<Quartet<Number, Number, Class<? extends Number>, Class<? extends Number>>> COMMON_NUMBER_CLASSES =
    Arrays.asList(
        // BYTE
        new Quartet<>((byte) 1, (byte) 1, Byte.class, Float.class),
        new Quartet<>((byte) 1, (short) 1, Short.class, Float.class),
        new Quartet<>((byte) 1, 1, Integer.class, Float.class),
        new Quartet<>((byte) 1, 1L, Long.class, Double.class),
        new Quartet<>((byte) 1, 1F, Float.class, Float.class),
        new Quartet<>((byte) 1, 1D, Double.class, Double.class),
        new Quartet<>((byte) 1, BigInteger.ONE, BigInteger.class, BigDecimal.class),
        new Quartet<>((byte) 1, BigDecimal.ONE, BigDecimal.class, BigDecimal.class),
        // SHORT
        new Quartet<>((short) 1, (short) 1, Short.class, Float.class),
        new Quartet<>((short) 1, 1, Integer.class, Float.class),
        new Quartet<>((short) 1, 1L, Long.class, Double.class),
        new Quartet<>((short) 1, 1F, Float.class, Float.class),
        new Quartet<>((short) 1, 1D, Double.class, Double.class),
        new Quartet<>((short) 1, BigInteger.ONE, BigInteger.class, BigDecimal.class),
        new Quartet<>((short) 1, BigDecimal.ONE, BigDecimal.class, BigDecimal.class),
        // INTEGER
        new Quartet<>(1, 1, Integer.class, Float.class),
        new Quartet<>(1, 1L, Long.class, Double.class),
        new Quartet<>(1, 1F, Float.class, Float.class),
        new Quartet<>(1, 1D, Double.class, Double.class),
        new Quartet<>(1, BigInteger.ONE, BigInteger.class, BigDecimal.class),
        new Quartet<>(1, BigDecimal.ONE, BigDecimal.class, BigDecimal.class),
        // LONG -- note long + float promotes to Double, not Float
        new Quartet<>(1L, 1L, Long.class, Double.class),
        new Quartet<>(1L, 1F, Double.class, Double.class),
        new Quartet<>(1L, 1D, Double.class, Double.class),
        new Quartet<>(1L, BigInteger.ONE, BigInteger.class, BigDecimal.class),
        new Quartet<>(1L, BigDecimal.ONE, BigDecimal.class, BigDecimal.class),
        // FLOAT
        new Quartet<>(1F, 1F, Float.class, Float.class),
        new Quartet<>(1F, 1D, Double.class, Double.class),
        new Quartet<>(1F, BigInteger.ONE, BigDecimal.class, BigDecimal.class),
        new Quartet<>(1F, BigDecimal.ONE, BigDecimal.class, BigDecimal.class),
        // DOUBLE
        new Quartet<>(1D, 1D, Double.class, Double.class),
        new Quartet<>(1D, BigInteger.ONE, BigDecimal.class, BigDecimal.class),
        new Quartet<>(1D, BigDecimal.ONE, BigDecimal.class, BigDecimal.class),
        // BIG INTEGER
        new Quartet<>(BigInteger.ONE, BigInteger.ONE, BigInteger.class, BigDecimal.class),
        new Quartet<>(BigInteger.ONE, BigDecimal.ONE, BigDecimal.class, BigDecimal.class),
        // BIG DECIMAL
        new Quartet<>(BigDecimal.ONE, BigDecimal.ONE, BigDecimal.class, BigDecimal.class)
    );
// (a, b, operation name) triplets whose long arithmetic overflows. Not referenced
// by the tests visible in this chunk -- presumably consumed by a test further down
// the file.
private final static List<Triplet<Number, Number, String>> OVERFLOW_CASES = Arrays.asList(
    new Triplet<>(Long.MAX_VALUE, 1L, "add"),
    new Triplet<>(Long.MIN_VALUE, 1L, "sub"),
    new Triplet<>(Long.MIN_VALUE, -1L, "mul"),
    new Triplet<>(Long.MIN_VALUE, -1L, "div"),
    new Triplet<>(Long.MAX_VALUE, Integer.MAX_VALUE, "mul")
);
// (a, b, operation name, type code) quartets where the operation exceeds the operand
// type's range but fits a wider type. The last element is presumably the expected
// promoted result type (s=short, i=int, l=long, f=float, d=double) -- confirm against
// the consuming test further down the file; it is not visible in this chunk.
private final static List<Quartet<Number, Number, String, String>> NO_OVERFLOW_CASES = Arrays.asList(
    new Quartet<>(Byte.MIN_VALUE, (byte)-1, "div", "s"),
    new Quartet<>(Byte.MAX_VALUE, (byte)100, "add", "s"),
    new Quartet<>(Byte.MIN_VALUE, (byte)100, "sub", "s"),
    new Quartet<>((byte)100, (byte)100, "mul", "s"),
    new Quartet<>(Byte.MAX_VALUE, 0.5f, "div", "f"),
    new Quartet<>(Short.MIN_VALUE, (short)-1, "div", "i"),
    new Quartet<>(Short.MAX_VALUE, (short)100, "add", "i"),
    new Quartet<>(Short.MIN_VALUE, (short)100, "sub", "i"),
    new Quartet<>(Short.MAX_VALUE, (short)100, "mul", "i"),
    new Quartet<>(Short.MAX_VALUE, 0.5f, "div", "f"),
    new Quartet<>(Integer.MIN_VALUE, -1, "div", "l"),
    new Quartet<>(Integer.MAX_VALUE, 1, "add", "l"),
    new Quartet<>(Integer.MIN_VALUE, 1, "sub", "l"),
    new Quartet<>(Integer.MAX_VALUE, Integer.MAX_VALUE, "mul", "l"),
    new Quartet<>(Integer.MAX_VALUE, 0.5f, "div", "f"),
    new Quartet<>(Long.MAX_VALUE, 0.5f, "div", "d")
);
/**
 * Verifies bit width and floating-point flag reported by getHighestCommonNumberInfo
 * for each single number type (the same value is passed as both operands).
 */
@Test
public void shouldReturnHighestCommonNumberNumberInfo() {
    for (final Triplet<Number, Integer, Boolean> q : NUMBER_INFO_CASES) {
        final NumberInfo numberInfo = getHighestCommonNumberInfo(false, q.getValue0(), q.getValue0());
        // JUnit's assertEquals takes (expected, actual) -- the expected triplet
        // values go first so failure messages report the right way around.
        assertEquals((int) q.getValue1(), numberInfo.getBits());
        assertEquals((boolean) q.getValue2(), numberInfo.getFp());
    }
}
/**
 * Verifies the common number class for every operand pair, in both argument orders,
 * with and without the boolean flag set; and that NaN/null operands are ignored.
 */
@Test
public void shouldReturnHighestCommonNumberClass() {
    for (final Quartet<Number, Number, Class<? extends Number>, Class<? extends Number>> testCase : COMMON_NUMBER_CLASSES) {
        final Number a = testCase.getValue0();
        final Number b = testCase.getValue1();
        // The result must be symmetric in its operands.
        assertEquals(testCase.getValue2(), getHighestCommonNumberClass(a, b));
        assertEquals(testCase.getValue2(), getHighestCommonNumberClass(b, a));
        assertEquals(testCase.getValue3(), getHighestCommonNumberClass(true, a, b));
        assertEquals(testCase.getValue3(), getHighestCommonNumberClass(true, b, a));
    }
    // Double.NaN and null are not numbers and thus should be ignored
    for (final Number number : EACH_NUMBER_TYPE) {
        assertEquals(number.getClass(), getHighestCommonNumberClass(number, Double.NaN));
        assertEquals(number.getClass(), getHighestCommonNumberClass(Double.NaN, number));
        assertEquals(number.getClass(), getHighestCommonNumberClass(number, null));
        assertEquals(number.getClass(), getHighestCommonNumberClass(null, number));
    }
}
/** Verifies that every helper tolerates null operands without throwing. */
@Test
public void shouldHandleAllNullInput() {
    // Arithmetic helpers yield null when both operands are null.
    assertNull(add(null, null));
    assertNull(sub(null, null));
    assertNull(mul(null, null));
    assertNull(div(null, null));
    // min/max have both Number and Comparable overloads; each must tolerate nulls.
    assertNull(max((Number) null, null));
    assertNull(max((Comparable) null, null));
    assertNull(min((Number) null, null));
    assertNull(min((Comparable) null, null));
    // Two nulls compare as equal.
    assertEquals(0, compare(null, null).intValue());
}
/**
 * Verifies that add() returns the value promoted to the wider operand type.
 * Null handling is asymmetric: a null second operand returns the first operand,
 * while a null first operand yields null.
 */
@Test
public void shouldAddAndReturnCorrectType() {
    assertEquals((byte) 1, add((byte) 1, (Byte) null));
    assertNull(add((Byte) null, (byte) 1));
    // BYTE
    assertEquals((byte) 2, add((byte) 1, (byte) 1));
    assertEquals((short) 2, add((byte) 1, (short) 1));
    assertEquals(2, add((byte) 1, 1));
    assertEquals(2L, add((byte) 1, 1L));
    assertEquals(2F, add((byte) 1, 1F));
    assertEquals(2D, add((byte) 1, 1D));
    assertEquals(BigInteger.ONE.add(BigInteger.ONE), add((byte) 1, BigInteger.ONE));
    assertEquals(BigDecimal.ONE.add(BigDecimal.ONE), add((byte) 1, BigDecimal.ONE));
    // SHORT
    assertEquals((short)2, add((short) 1, (short) 1));
    assertEquals(2, add((short) 1, 1));
    assertEquals(2L, add((short) 1, 1L));
    assertEquals(2F, add((short) 1, 1F));
    assertEquals(2D, add((short) 1, 1D));
    assertEquals(BigInteger.ONE.add(BigInteger.ONE), add((short) 1, BigInteger.ONE));
    assertEquals(BigDecimal.ONE.add(BigDecimal.ONE), add((short) 1, BigDecimal.ONE));
    // INTEGER
    assertEquals(2, add(1, 1));
    assertEquals(2L, add(1, 1L));
    assertEquals(2F, add(1, 1F));
    assertEquals(2D, add(1, 1D));
    assertEquals(BigInteger.ONE.add(BigInteger.ONE), add(1, BigInteger.ONE));
    assertEquals(BigDecimal.ONE.add(BigDecimal.ONE), add(1, BigDecimal.ONE));
    // LONG -- note long + float promotes to double, not float
    assertEquals(2L, add(1L, 1L));
    assertEquals(2D, add(1L, 1F));
    assertEquals(2D, add(1L, 1D));
    assertEquals(BigInteger.ONE.add(BigInteger.ONE), add(1L, BigInteger.ONE));
    assertEquals(BigDecimal.ONE.add(BigDecimal.ONE), add(1L, BigDecimal.ONE));
    // FLOAT -- float + BigInteger promotes to BigDecimal (scale 1)
    assertEquals(2F, add(1F, 1F));
    assertEquals(2D, add(1F, 1D));
    assertEquals(BigDecimal.ONE.add(BigDecimal.ONE).setScale(1, RoundingMode.HALF_UP), add(1F, BigInteger.ONE));
    assertEquals(BigDecimal.ONE.add(BigDecimal.ONE).setScale(1, RoundingMode.HALF_UP), add(1F, BigDecimal.ONE));
    // DOUBLE
    assertEquals(2D, add(1D, 1D));
    assertEquals(BigDecimal.ONE.add(BigDecimal.ONE).setScale(1, RoundingMode.HALF_UP), add(1D, BigInteger.ONE));
    assertEquals(BigDecimal.ONE.add(BigDecimal.ONE).setScale(1, RoundingMode.HALF_UP), add(1D, BigDecimal.ONE));
    // BIG INTEGER
    assertEquals(BigInteger.ONE.add(BigInteger.ONE), add(BigInteger.ONE, BigInteger.ONE));
    assertEquals(BigDecimal.ONE.add(BigDecimal.ONE), add(BigInteger.ONE, BigDecimal.ONE));
    // BIG DECIMAL
    assertEquals(BigDecimal.ONE.add(BigDecimal.ONE), add(BigDecimal.ONE, BigDecimal.ONE));
}
/**
 * Verifies that sub() returns the value promoted to the wider operand type.
 * Null handling mirrors add(): a null second operand returns the first operand,
 * while a null first operand yields null.
 */
@Test
public void shouldSubtractAndReturnCorrectType() {
    assertEquals((byte) 1, sub((byte) 1, (Byte) null));
    assertNull(sub((Byte) null, (byte) 1));
    // BYTE
    assertEquals((byte) 0, sub((byte) 1, (byte) 1));
    assertEquals((short) 0, sub((byte) 1, (short) 1));
    assertEquals(0, sub((byte) 1, 1));
    assertEquals(0L, sub((byte) 1, 1L));
    assertEquals(0F, sub((byte) 1, 1F));
    assertEquals(0D, sub((byte) 1, 1D));
    assertEquals(BigInteger.ZERO, sub((byte) 1, BigInteger.ONE));
    assertEquals(BigDecimal.ZERO, sub((byte) 1, BigDecimal.ONE));
    // SHORT
    assertEquals((short) 0, sub((short) 1, (short) 1));
    assertEquals(0, sub((short) 1, 1));
    assertEquals(0L, sub((short) 1, 1L));
    assertEquals(0F, sub((short) 1, 1F));
    assertEquals(0D, sub((short) 1, 1D));
    assertEquals(BigInteger.ZERO, sub((short) 1, BigInteger.ONE));
    assertEquals(BigDecimal.ZERO, sub((short) 1, BigDecimal.ONE));
    // INTEGER
    assertEquals(0, sub(1, 1));
    assertEquals(0L, sub(1, 1L));
    assertEquals(0F, sub(1, 1F));
    assertEquals(0D, sub(1, 1D));
    assertEquals(BigInteger.ZERO, sub(1, BigInteger.ONE));
    assertEquals(BigDecimal.ZERO, sub(1, BigDecimal.ONE));
    // LONG -- note long - float promotes to double, not float
    assertEquals(0L, sub(1L, 1L));
    assertEquals(0D, sub(1L, 1F));
    assertEquals(0D, sub(1L, 1D));
    assertEquals(BigInteger.ZERO, sub(1L, BigInteger.ONE));
    assertEquals(BigDecimal.ZERO, sub(1L, BigDecimal.ONE));
    // FLOAT -- float - BigInteger promotes to BigDecimal (scale 1)
    assertEquals(0F, sub(1F, 1F));
    assertEquals(0D, sub(1F, 1D));
    assertEquals(BigDecimal.ZERO.setScale(1, RoundingMode.HALF_UP), sub(1F, BigInteger.ONE));
    assertEquals(BigDecimal.ZERO.setScale(1, RoundingMode.HALF_UP), sub(1F, BigDecimal.ONE));
    // DOUBLE
    assertEquals(0D, sub(1D, 1D));
    assertEquals(BigDecimal.ZERO.setScale(1, RoundingMode.HALF_UP), sub(1D, BigInteger.ONE));
    assertEquals(BigDecimal.ZERO.setScale(1, RoundingMode.HALF_UP), sub(1D, BigDecimal.ONE));
    // BIG INTEGER
    assertEquals(BigInteger.ZERO, sub(BigInteger.ONE, BigInteger.ONE));
    assertEquals(BigDecimal.ZERO, sub(BigInteger.ONE, BigDecimal.ONE));
    // BIG DECIMAL
    assertEquals(BigDecimal.ZERO, sub(BigDecimal.ONE, BigDecimal.ONE));
}
@Test
public void shouldMultiplyAndReturnCorrectType() {
  // Same widening contract as add()/sub(), exercised for mul():
  // result type is the wider operand type; long*float -> Double;
  // float/double mixed with BigInteger/BigDecimal -> BigDecimal (scale 1, HALF_UP).
  // Null right operand returns the left operand; null left operand returns null.
  assertEquals((byte) 1, mul((byte) 1, (Byte) null));
  assertNull(mul((Byte) null, (byte) 1));
  // BYTE
  assertEquals((byte) 1, mul((byte) 1, (byte) 1));
  assertEquals((short) 1, mul((byte) 1, (short) 1));
  assertEquals(1, mul((byte) 1, 1));
  assertEquals(1L, mul((byte) 1, 1L));
  assertEquals(1F, mul((byte) 1, 1F));
  assertEquals(1D, mul((byte) 1, 1D));
  assertEquals(BigInteger.ONE, mul((byte) 1, BigInteger.ONE));
  assertEquals(BigDecimal.ONE, mul((byte) 1, BigDecimal.ONE));
  // SHORT
  assertEquals((short) 1, mul((short) 1, (short) 1));
  assertEquals(1, mul((short) 1, 1));
  assertEquals(1L, mul((short) 1, 1L));
  assertEquals(1F, mul((short) 1, 1F));
  assertEquals(1D, mul((short) 1, 1D));
  assertEquals(BigInteger.ONE, mul((short) 1, BigInteger.ONE));
  assertEquals(BigDecimal.ONE, mul((short) 1, BigDecimal.ONE));
  // INTEGER
  assertEquals(1, mul(1, 1));
  assertEquals(1L, mul(1, 1L));
  assertEquals(1F, mul(1, 1F));
  assertEquals(1D, mul(1, 1D));
  assertEquals(BigInteger.ONE, mul(1, BigInteger.ONE));
  assertEquals(BigDecimal.ONE, mul(1, BigDecimal.ONE));
  // LONG
  assertEquals(1L, mul(1L, 1L));
  // long combined with float promotes to Double (not Float).
  assertEquals(1D, mul(1L, 1F));
  assertEquals(1D, mul(1L, 1D));
  assertEquals(BigInteger.ONE, mul(1L, BigInteger.ONE));
  assertEquals(BigDecimal.ONE, mul(1L, BigDecimal.ONE));
  // FLOAT
  assertEquals(1F, mul(1F, 1F));
  assertEquals(1D, mul(1F, 1D));
  assertEquals(BigDecimal.ONE.setScale(1, RoundingMode.HALF_UP), mul(1F, BigInteger.ONE));
  assertEquals(BigDecimal.ONE.setScale(1, RoundingMode.HALF_UP), mul(1F, BigDecimal.ONE));
  // DOUBLE
  assertEquals(1D, mul(1D, 1D));
  assertEquals(BigDecimal.ONE.setScale(1, RoundingMode.HALF_UP), mul(1D, BigInteger.ONE));
  assertEquals(BigDecimal.ONE.setScale(1, RoundingMode.HALF_UP), mul(1D, BigDecimal.ONE));
  // BIG INTEGER
  assertEquals(BigInteger.ONE, mul(BigInteger.ONE, BigInteger.ONE));
  assertEquals(BigDecimal.ONE, mul(BigInteger.ONE, BigDecimal.ONE));
  // BIG DECIMAL
  assertEquals(BigDecimal.ONE, mul(BigDecimal.ONE, BigDecimal.ONE));
}
@Test
public void shouldDivideAndReturnCorrectType() {
  // Two-argument div() (no forced floating point): integral / integral keeps
  // the integral result type; otherwise the usual widening rules apply
  // (long/float -> Double; float/double with Big types -> BigDecimal scale 1 HALF_UP).
  // Null right operand returns the left operand; null left operand returns null.
  assertEquals((byte) 1, div((byte) 1, (Byte) null));
  assertNull(div((Byte) null, (byte) 1));
  // BYTE
  assertEquals((byte) 1, div((byte) 1, (byte) 1));
  assertEquals((short) 1, div((byte) 1, (short) 1));
  assertEquals(1, div((byte) 1, 1));
  assertEquals(1L, div((byte) 1, 1L));
  assertEquals(1F, div((byte) 1, 1F));
  assertEquals(1D, div((byte) 1, 1D));
  assertEquals(BigInteger.ONE, div((byte) 1, BigInteger.ONE));
  assertEquals(BigDecimal.ONE, div((byte) 1, BigDecimal.ONE));
  // SHORT
  assertEquals((short) 1, div((short) 1, (short) 1));
  assertEquals(1, div((short) 1, 1));
  assertEquals(1L, div((short) 1, 1L));
  assertEquals(1F, div((short) 1, 1F));
  assertEquals(1D, div((short) 1, 1D));
  assertEquals(BigInteger.ONE, div((short) 1, BigInteger.ONE));
  assertEquals(BigDecimal.ONE, div((short) 1, BigDecimal.ONE));
  // INTEGER
  assertEquals(1, div(1, 1));
  assertEquals(1L, div(1, 1L));
  assertEquals(1F, div(1, 1F));
  assertEquals(1D, div(1, 1D));
  assertEquals(BigInteger.ONE, div(1, BigInteger.ONE));
  assertEquals(BigDecimal.ONE, div(1, BigDecimal.ONE));
  // LONG
  assertEquals(1L, div(1L, 1L));
  // long combined with float promotes to Double (not Float).
  assertEquals(1D, div(1L, 1F));
  assertEquals(1D, div(1L, 1D));
  assertEquals(BigInteger.ONE, div(1L, BigInteger.ONE));
  assertEquals(BigDecimal.ONE, div(1L, BigDecimal.ONE));
  // FLOAT
  assertEquals(1F, div(1F, 1F));
  assertEquals(1D, div(1F, 1D));
  assertEquals(BigDecimal.ONE.setScale(1, RoundingMode.HALF_UP), div(1F, BigInteger.ONE));
  assertEquals(BigDecimal.ONE.setScale(1, RoundingMode.HALF_UP), div(1F, BigDecimal.ONE));
  // DOUBLE
  assertEquals(1D, div(1D, 1D));
  assertEquals(BigDecimal.ONE.setScale(1, RoundingMode.HALF_UP), div(1D, BigInteger.ONE));
  assertEquals(BigDecimal.ONE.setScale(1, RoundingMode.HALF_UP), div(1D, BigDecimal.ONE));
  // BIG INTEGER
  assertEquals(BigInteger.ONE, div(BigInteger.ONE, BigInteger.ONE));
  assertEquals(BigDecimal.ONE, div(BigInteger.ONE, BigDecimal.ONE));
  // BIG DECIMAL
  assertEquals(BigDecimal.ONE, div(BigDecimal.ONE, BigDecimal.ONE));
}
@Test
public void shouldDivideForceFloatingPointAndReturnCorrectType() {
  // Three-argument div(a, b, true) forces a floating-point result even for
  // integral operands: byte/short/int pairs yield Float, anything involving a
  // long yields Double, and Big types yield BigDecimal.
  // BYTE
  assertEquals(1F, div((byte) 1, (byte) 1, true));
  assertEquals(1F, div((byte) 1, (short) 1, true));
  assertEquals(1F, div((byte) 1, 1, true));
  assertEquals(1D, div((byte) 1, 1L, true));
  assertEquals(1F, div((byte) 1, 1F, true));
  assertEquals(1D, div((byte) 1, 1D, true));
  assertEquals(BigDecimal.ONE, div((byte) 1, BigInteger.ONE, true));
  assertEquals(BigDecimal.ONE, div((byte) 1, BigDecimal.ONE, true));
  // SHORT
  assertEquals(1F, div((short) 1, (short) 1, true));
  assertEquals(1F, div((short) 1, 1, true));
  assertEquals(1D, div((short) 1, 1L, true));
  assertEquals(1F, div((short) 1, 1F, true));
  assertEquals(1D, div((short) 1, 1D, true));
  assertEquals(BigDecimal.ONE, div((short) 1, BigInteger.ONE, true));
  assertEquals(BigDecimal.ONE, div((short) 1, BigDecimal.ONE, true));
  // INTEGER
  assertEquals(1F, div(1, 1, true));
  assertEquals(1D, div(1, 1L, true));
  assertEquals(1F, div(1, 1F, true));
  assertEquals(1D, div(1, 1D, true));
  assertEquals(BigDecimal.ONE, div(1, BigInteger.ONE, true));
  assertEquals(BigDecimal.ONE, div(1, BigDecimal.ONE, true));
  // LONG
  assertEquals(1D, div(1L, 1L, true));
  assertEquals(1D, div(1L, 1F, true));
  assertEquals(1D, div(1L, 1D, true));
  assertEquals(BigDecimal.ONE, div(1L, BigInteger.ONE, true));
  assertEquals(BigDecimal.ONE, div(1L, BigDecimal.ONE, true));
  // FLOAT
  assertEquals(1F, div(1F, 1F, true));
  assertEquals(1D, div(1F, 1D, true));
  assertEquals(BigDecimal.ONE.setScale(1, RoundingMode.HALF_UP), div(1F, BigInteger.ONE, true));
  assertEquals(BigDecimal.ONE.setScale(1, RoundingMode.HALF_UP), div(1F, BigDecimal.ONE, true));
  // DOUBLE
  assertEquals(1D, div(1D, 1D, true));
  assertEquals(BigDecimal.ONE.setScale(1, RoundingMode.HALF_UP), div(1D, BigInteger.ONE, true));
  assertEquals(BigDecimal.ONE.setScale(1, RoundingMode.HALF_UP), div(1D, BigDecimal.ONE, true));
  // BIG INTEGER
  assertEquals(BigDecimal.ONE, div(BigInteger.ONE, BigInteger.ONE, true));
  assertEquals(BigDecimal.ONE, div(BigInteger.ONE, BigDecimal.ONE, true));
  // BIG DECIMAL
  assertEquals(BigDecimal.ONE, div(BigDecimal.ONE, BigDecimal.ONE, true));
}
@Test
public void testMinMaxCompare() {
  // Cross-type min/max/compare: every zero representation must compare below
  // every one representation, regardless of the numeric wrapper types involved.
  final List<Number> zeros = Arrays.asList((byte) 0, (short) 0, 0, 0L, 0F, 0D, BigInteger.ZERO, BigDecimal.ZERO);
  final List<Number> ones = Arrays.asList((byte) 1, (short) 1, 1, 1L, 1F, 1D, BigInteger.ONE, BigDecimal.ONE);
  for (Number zero : zeros) {
    for (Number one : ones) {
      assertEquals(0, min(zero, one).intValue());
      assertEquals(0, min(one, zero).intValue());
      assertEquals(1, max(zero, one).intValue());
      assertEquals(1, max(one, zero).intValue());
      assertTrue(compare(zero, one) < 0);
      assertTrue(compare(one, zero) > 0);
      assertTrue(compare(zero, zero) == 0);
      assertTrue(compare(one, one) == 0);
    }
  }
  // Null handling: min/max with a null operand return the non-null operand,
  // and compare() orders null below any non-null value.
  for (Number one : ones) {
    assertEquals(1, min(null, one).intValue());
    assertEquals(1, min(one, null).intValue());
    assertEquals(1, max(null, one).intValue());
    assertEquals(1, max(one, null).intValue());
    assertEquals(-1, compare(null, one).intValue());
    assertEquals(1, compare(one, null).intValue());
  }
}
// The six tests below verify overflow promotion for floating-point operands:
// when a float operation would overflow Float range, the result is promoted to
// a finite Double; when a double operation overflows Double range, the result
// is Double infinity (no exception).
@Test
public void shouldPromoteFloatToDoubleForAdd() {
  Number value = add(Float.MAX_VALUE, Float.MAX_VALUE);
  assertTrue(value instanceof Double);
  assertFalse(Double.isInfinite(value.doubleValue()));
}
@Test
public void shouldPromoteDoubleToInfiniteForAdd() {
  Number value = add(Double.MAX_VALUE, Double.MAX_VALUE);
  assertTrue(value instanceof Double);
  assertTrue(Double.isInfinite(value.doubleValue()));
}
@Test
public void shouldPromoteFloatToDoubleForMul() {
  Number value = mul(Float.MAX_VALUE, 2F);
  assertTrue(value instanceof Double);
  assertFalse(Double.isInfinite(value.doubleValue()));
}
@Test
public void shouldPromoteDoubleToInfiniteForMul() {
  Number value = mul(Double.MAX_VALUE, 2F);
  assertTrue(value instanceof Double);
  assertTrue(Double.isInfinite(value.doubleValue()));
}
@Test
public void shouldPromoteFloatToDoubleForDiv() {
  Number value = div(Float.MAX_VALUE, 0.5F);
  assertTrue(value instanceof Double);
  assertFalse(Double.isInfinite(value.doubleValue()));
}
@Test
public void shouldPromoteDoubleToInfiniteForDiv() {
  Number value = div(Double.MAX_VALUE, 0.5F);
  assertTrue(value instanceof Double);
  assertTrue(Double.isInfinite(value.doubleValue()));
}
@Test
public void shouldThrowArithmeticExceptionOnOverflow() {
  // Each triplet is (lhs, rhs, operation name). Every case is expected to
  // overflow its result type and therefore throw ArithmeticException; reaching
  // the post-switch fail() means the operation silently succeeded.
  for (final Triplet<Number, Number, String> q : OVERFLOW_CASES) {
    try {
      switch (q.getValue2()) {
        case "add":
          add(q.getValue0(), q.getValue1());
          break;
        case "sub":
          sub(q.getValue0(), q.getValue1());
          break;
        case "mul":
          mul(q.getValue0(), q.getValue1());
          break;
        case "div":
          div(q.getValue0(), q.getValue1());
          break;
        default:
          // Fixed: message was missing the opening quote around the operation
          // name (it read: Unexpected math operation add'). Now consistent
          // with the equivalent message in shouldNotThrowArithmeticExceptionOnOverflow.
          fail("Unexpected math operation '" + q.getValue2() + "'");
      }
      fail("ArithmeticException expected");
    }
    catch (ArithmeticException ex) {
      // expected
    }
  }
}
@Test
public void shouldNotThrowArithmeticExceptionOnOverflow() {
  // Each quartet is (lhs, rhs, operation name, expected result-type code).
  // None of these cases overflows, so no ArithmeticException may escape, and
  // the result must be widened to exactly the expected wrapper type
  // (s=Short, i=Integer, l=Long, f=Float, d=Double).
  for (final Quartet<Number, Number, String, String> q : NO_OVERFLOW_CASES) {
    try {
      Number result = 0;
      switch (q.getValue2()) {
        case "add":
          result = add(q.getValue0(), q.getValue1());
          break;
        case "sub":
          result = sub(q.getValue0(), q.getValue1());
          break;
        case "mul":
          result = mul(q.getValue0(), q.getValue1());
          break;
        case "div":
          result = div(q.getValue0(), q.getValue1());
          break;
        default:
          fail("Unexpected math operation '" + q.getValue2() + "'");
      }
      String classValue = result.getClass().toString();
      switch (q.getValue3()) {
        case "s":
          assertEquals("class java.lang.Short", classValue);
          break;
        case "i":
          assertEquals("class java.lang.Integer", classValue);
          break;
        case "l":
          assertEquals("class java.lang.Long", classValue);
          break;
        case "f":
          assertEquals("class java.lang.Float", classValue);
          break;
        case "d":
          assertEquals("class java.lang.Double", classValue);
          break;
        default:
          fail("Unexpected class type '" + q.getValue3() + "'");
      }
    }
    catch (ArithmeticException ex) {
      // Fixed typo in the failure message: it read "bot expected".
      fail("ArithmeticException not expected");
    }
  }
}
// coerceTo(): best-effort narrowing/widening. If the value fits in the target
// type it is converted; if it does not fit, the ORIGINAL value is returned
// unchanged (no exception) — contrast with castTo() below, which throws.
@Test
public void shouldCoerceToReturnSameInstanceForSameClass() {
  final Integer value = 42;
  assertEquals(value, NumberHelper.coerceTo(value, Integer.class));
}
@Test
public void shouldCoerceToConvertToByte() {
  final Integer value = 42;
  assertEquals(Byte.valueOf((byte) 42), NumberHelper.coerceTo(value, Byte.class));
}
@Test
public void shouldCoerceToConvertToShort() {
  final Integer value = 42;
  assertEquals(Short.valueOf((short) 42), NumberHelper.coerceTo(value, Short.class));
}
@Test
public void shouldCoerceToConvertToLong() {
  final Integer value = 42;
  assertEquals(Long.valueOf(42L), NumberHelper.coerceTo(value, Long.class));
}
@Test
public void shouldCoerceToConvertToFloat() {
  final Integer value = 42;
  assertEquals(Float.valueOf(42.0f), NumberHelper.coerceTo(value, Float.class));
}
@Test
public void shouldCoerceToConvertToDouble() {
  final Integer value = 42;
  assertEquals(Double.valueOf(42.0), NumberHelper.coerceTo(value, Double.class));
}
@Test
public void shouldCoerceToConvertToBigInteger() {
  final Integer value = 42;
  assertEquals(BigInteger.valueOf(42), NumberHelper.coerceTo(value, BigInteger.class));
}
@Test
public void shouldCoerceToConvertToBigDecimal() {
  final Integer value = 42;
  assertEquals(BigDecimal.valueOf(42), NumberHelper.coerceTo(value, BigDecimal.class));
}
// Out-of-range coercions keep the original value: 128 > Byte.MAX_VALUE, etc.
@Test
public void shouldCoerceToRetainOriginalTypeIfCannotFitInByte() {
  final Integer value = 128;
  assertEquals(value, NumberHelper.coerceTo(value, Byte.class));
}
@Test
public void shouldCoerceToRetainOriginalTypeIfCannotFitInShort() {
  final Integer value = 32768;
  assertEquals(value, NumberHelper.coerceTo(value, Short.class));
}
@Test
public void shouldCoerceToRetainOriginalTypeIfCannotFitInInteger() {
  final Long value = 2147483648L;
  assertEquals(value, NumberHelper.coerceTo(value, Integer.class));
}
@Test
public void shouldCoerceToRetainOriginalTypeIfCannotFitInFloat() {
  final Double value = Double.MAX_VALUE;
  assertEquals(value, NumberHelper.coerceTo(value, Float.class));
}
@Test
public void shouldCoerceToConvertToByteIfCanFit() {
  final Integer value = 42;
  assertEquals(Byte.valueOf((byte) 42), NumberHelper.coerceTo(value, Byte.class));
}
@Test
public void shouldCoerceToConvertToShortIfCanFit() {
  final Integer value = 42;
  assertEquals(Short.valueOf((short) 42), NumberHelper.coerceTo(value, Short.class));
}
@Test
public void shouldCoerceToConvertToIntegerIfCanFit() {
  final Long value = 42L;
  assertEquals(Integer.valueOf(42), NumberHelper.coerceTo(value, Integer.class));
}
@Test
public void shouldCoerceToConvertToFloatIfCanFit() {
  final Double value = 42.0;
  assertEquals(Float.valueOf(42.0f), NumberHelper.coerceTo(value, Float.class));
}
// castTo(): strict conversion keyed by the N type-tag enum/constants.
// Unlike coerceTo(), an out-of-range value throws ArithmeticException
// (see the overflow tests further below).
@Test
public void shouldCastToReturnSameInstanceForSameClass() {
  final Integer value = 42;
  assertEquals(value, NumberHelper.castTo(value, N.int_));
}
@Test
public void shouldCastToConvertToByte() {
  final Integer value = 42;
  assertEquals(Byte.valueOf((byte) 42), NumberHelper.castTo(value, N.byte_));
}
@Test
public void shouldCastToConvertToShort() {
  final Integer value = 42;
  assertEquals(Short.valueOf((short) 42), NumberHelper.castTo(value, N.short_));
}
@Test
public void shouldCastToConvertToLong() {
  final Integer value = 42;
  assertEquals(Long.valueOf(42L), NumberHelper.castTo(value, N.long_));
}
@Test
public void shouldCastToConvertToFloat() {
  final Integer value = 42;
  assertEquals(Float.valueOf(42.0f), NumberHelper.castTo(value, N.float_));
}
@Test
public void shouldCastToConvertToDouble() {
  final Integer value = 42;
  assertEquals(Double.valueOf(42.0), NumberHelper.castTo(value, N.double_));
}
@Test
public void shouldCastToConvertToBigInteger() {
  final Integer value = 42;
  assertEquals(BigInteger.valueOf(42), NumberHelper.castTo(value, N.bigInt));
}
@Test
public void shouldCastToConvertDoubleToBigInteger() {
  // A double larger than Long.MAX_VALUE must convert to BigInteger losslessly
  // (1e21 is exactly representable as a double).
  // Uses Double.valueOf(String) instead of the deprecated Double(String) constructor.
  final Double value = Double.valueOf("1000000000000000000000");
  assertEquals(new BigInteger("1000000000000000000000"), NumberHelper.castTo(value, N.bigInt));
}
@Test
public void shouldCastToConvertToBigDecimal() {
  final Integer value = 42;
  assertEquals(BigDecimal.valueOf(42), NumberHelper.castTo(value, N.bigDecimal));
}
// Strict casts of out-of-range values must throw ArithmeticException
// (128 > Byte.MAX_VALUE, 32768 > Short.MAX_VALUE, etc.).
@Test(expected = ArithmeticException.class)
public void shouldOverflowIfCannotFitInByte() {
  final Integer value = 128;
  NumberHelper.castTo(value, N.byte_);
}
@Test(expected = ArithmeticException.class)
public void shouldOverflowIfCannotFitInShort() {
  final Integer value = 32768;
  NumberHelper.castTo(value, N.short_);
}
@Test(expected = ArithmeticException.class)
public void shouldOverflowIfCannotFitInInteger() {
  final Long value = 2147483648L;
  NumberHelper.castTo(value, N.int_);
}
@Test(expected = ArithmeticException.class)
public void shouldOverflowIfCannotFitInFloat() {
  final Double value = Double.MAX_VALUE;
  NumberHelper.castTo(value, N.float_);
}
// Big values far beyond double/float range (1E400, 2^2000) must also throw.
@Test(expected = ArithmeticException.class)
public void shouldOverflowIfBigDecimalCannotFitInFloat() {
  NumberHelper.castTo(new BigDecimal("1E400"), N.float_);
}
@Test(expected = ArithmeticException.class)
public void shouldOverflowIfBigIntegerCannotFitInFloat() {
  NumberHelper.castTo(new BigInteger("1").shiftLeft(2000), N.float_);
}
@Test(expected = ArithmeticException.class)
public void shouldOverflowIfBigDecimalCannotFitInDouble() {
  NumberHelper.castTo(new BigDecimal("1E400"), N.double_);
}
@Test(expected = ArithmeticException.class)
public void shouldOverflowIfBigIntegerCannotFitInDouble() {
  NumberHelper.castTo(new BigInteger("1").shiftLeft(2000), N.double_);
}
@Test(expected = ArithmeticException.class)
public void shouldUnderflowIfDoubleCannotFitInInteger() {
  final Double value = -Double.MAX_VALUE;
  // Printed so the (unreachable) value is observable if the cast wrongly succeeds.
  System.out.println(NumberHelper.castTo(value, N.int_));
}
// Infinities and NaN are preserved across float<->double casts rather than
// being treated as overflow.
@Test
public void shouldCastDoubleInfinityToFloatInfinity() {
  assertEquals(Float.POSITIVE_INFINITY, NumberHelper.castTo(Double.POSITIVE_INFINITY, N.float_));
  assertEquals(Float.NEGATIVE_INFINITY, NumberHelper.castTo(Double.NEGATIVE_INFINITY, N.float_));
}
@Test
public void shouldCastFloatInfinityToDoubleInfinity() {
  assertEquals(Double.POSITIVE_INFINITY, NumberHelper.castTo(Float.POSITIVE_INFINITY, N.double_));
  assertEquals(Double.NEGATIVE_INFINITY, NumberHelper.castTo(Float.NEGATIVE_INFINITY, N.double_));
}
@Test
public void shouldCastFloatNaNToDoubleNaN() {
  assertEquals(Double.NaN, NumberHelper.castTo(Float.NaN, N.double_));
}
@Test
public void shouldCastDoubleNaNToFloatNaN() {
  assertEquals(Float.NaN, NumberHelper.castTo(Double.NaN, N.float_));
}
}
/*
* Copyright 2017 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.javascript.rhino.Node;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
* Tests for {@link RemoveUnusedCode} that cover removal of prototype properties and class
* properties.
*/
@RunWith(JUnit4.class)
public final class RemoveUnusedCodePrototypePropertiesTest extends CompilerTestCase {
// Externs supplied to every test case: declares names/properties that must be
// treated as externally visible (e.g. bExtern/externPropName referenced by
// tests below).
// NOTE(review): the externs assign goog.reflect.objectProperty without first
// declaring goog.reflect — confirm this is intentional (externs are not executed).
private static final String EXTERNS =
    MINIMAL_EXTERNS
        + """
        var window;
        var Math = {};
        Math.random = function() {};
        function alert(x) {}
        function externFunction() {}
        externFunction.prototype.externPropName;
        var mExtern;
        mExtern.bExtern;
        mExtern['cExtern'];
        /** @const */
        var goog = {};
        goog.reflect.objectProperty = function(name) { };
        """;
// Per-test knobs passed to RemoveUnusedCode.Builder; reset in setUp().
private boolean keepLocals = true;
private boolean keepGlobals = false;
public RemoveUnusedCodePrototypePropertiesTest() {
  super(EXTERNS);
}
@Override
protected int getNumRepetitions() {
  return 1; // should reach fixed point in a single run
}
// Builds the pass under test, honoring the keepLocals/keepGlobals knobs set by
// individual test methods before they call test()/testSame().
@Override
protected CompilerPass getProcessor(Compiler compiler) {
  return new CompilerPass() {
    @Override
    public void process(Node externs, Node root) {
      new RemoveUnusedCode.Builder(compiler)
          .removeLocalVars(!keepLocals)
          .removeGlobals(!keepGlobals)
          .removeUnusedPrototypeProperties(true)
          .build()
          .process(externs, root);
    }
  };
}
@Override
public CompilerOptions getOptions() {
  CompilerOptions options = super.getOptions();
  // Module-load warnings are irrelevant to these tests.
  options.setWarningLevel(DiagnosticGroups.MODULE_LOAD, CheckLevel.OFF);
  return options;
}
@Override
@Before
public void setUp() throws Exception {
  super.setUp();
  // Allow testing of features that aren't fully supported for output yet.
  enableNormalize();
  // TODO(bradfordcsmith): Stop normalizing the expected output or document why it is necessary.
  enableNormalizeExpectedOutput();
  enableGatherExternProperties();
  // Reset the knobs so each test starts from the same defaults.
  keepLocals = true;
  keepGlobals = false;
}
@Test
public void testClassPropertiesNotRemoved() {
  keepGlobals = true;
  // This whole test class runs with removeUnusedClassProperties disabled.
  testSame("/** @constructor */ function C() {} C.unused = 3;");
  testSame(
      "/** @constructor */ function C() {} Object.defineProperties(C, {unused: {value: 3}});");
}
@Test
public void testUnusedPrototypeFieldReference() {
  test(
      "function C() {} C.prototype.x; new C();", // x is not actually read
      "function C() {} new C();");
}
@Test
public void testUnusedReferenceToFieldWithGetter() {
  // Reference to a field with a getter should not be removed unless we know it has no side
  // effects.
  // TODO(bradfordcsmith): Implement removal for the no-side-effect cases.
  testSame("function C() {} C.prototype = { get x() {} }; new C().x");
  testSame("function C() {} C.prototype = { get x() { alert('x'); } }; new C().x");
  testSame("class C { get x() {} } new C().x;");
  testSame("class C { get x() { alert('x'); } } new C().x");
  testSame("let c = { get x() {} }; c.x;");
  testSame("let c = { get x() { alert('x'); } }; c.x;");
}
@Test
public void testAnonymousPrototypePropertyRemoved() {
  // Assignments to `.prototype.x` on anonymous/derived objects are removable,
  // but any side effects in the expression must be preserved.
  test("({}.prototype.x = 5, externFunction())", "externFunction();");
  test("({}).prototype.x = 5;", "");
  test("({}).prototype.x = externFunction();", "externFunction();");
  test("externFunction({}.prototype.x = 5);", "externFunction(5);");
  test("externFunction().prototype.x = 5;", "externFunction();");
  test("externFunction().prototype.x = externFunction();", "externFunction(), externFunction();");
  // make sure an expression with '.prototype' is traversed when it should be and not when it
  // shouldn't.
  test(
      "function C() {} externFunction(C).prototype.x = 5;", // preserve format
      "function C() {} externFunction(C);");
  test("function C() {} ({ C: C }).prototype.x = 5;", "");
}
@Test
public void testAnonymousPrototypePropertyNoRemoveSideEffect1() {
  test(
      """
      function A() { // preserve format
      externFunction('me');
      return function(){}
      }
      A().prototype.foo = function() {};
      """,
      """
      function A() { // preserve format
      externFunction('me');
      return function(){}
      }
      A();
      """);
}
@Test
public void testAnonymousPrototypePropertyNoRemoveSideEffect2() {
  test(
      "function A() { externFunction('me'); return function(){}; } A().prototype.foo++;",
      "function A() { externFunction('me'); return function(){}; } A();");
}
@Test
public void testIncPrototype() {
  // Increment of an unused prototype property is itself removable.
  test("function A() {} A.prototype.x = 1; A.prototype.x++;", "");
  test(
      "function A() {} A.prototype.x = 1; A.prototype.x++; new A();",
      "function A() {} new A();");
  test("externFunction().prototype.x++", "externFunction()");
}
@Test
public void testRenamePropertyFunctionTest() {
  // A JSCompiler_renameProperty('name') call keeps the named property alive.
  test(
      """
      function C() {} // preserve formatting
      C.prototype.unreferenced = function() {};
      C.prototype.renamed = function() {};
      JSCompiler_renameProperty('renamed');
      new C();
      """,
      """
      function C() {} // preserve formatting
      C.prototype.renamed = function() {};
      JSCompiler_renameProperty('renamed');
      new C();
      """);
}
@Test
public void testNonPrototypePropertiesAreKept() {
  // foo cannot be removed because it is called
  // x cannot be removed because a property is set on it and we don't know where it comes from
  // x.a cannot be removed because we don't know where x comes from.
  // x.prototype.b *can* be removed because we consider it safe to remove prototype properties
  // that have no references.
  test(
      "function foo(x) { x.a = 1; x.prototype.b = 2; }; foo({});",
      "function foo(x) { x.a = 1; }; foo({});");
}
@Test
public void testAnalyzePrototypeProperties() {
  // Basic removal for prototype properties
  test(
      """
      function e(){}
      e.prototype.a = function(){};
      e.prototype.b = function(){};
      var x = new e; x.a()
      """,
      """
      function e(){}
      e.prototype.a = function(){};
      var x = new e; x.a()
      """);
}
@Test
public void testObjectLiteralPrototype() {
  test(
      "function e(){} e.prototype = {a: function(){}, b: function(){}}; var x = new e; x.a()",
      "function e(){} e.prototype = {a: function(){} }; var x = new e; x.a()");
}
@Test
public void testObjectLiteralPrototypeUnusedPropDefinitionWithSideEffects() {
  // The unused property b is removed, but a's side-effecting initializer stays.
  test(
      "function e(){} e.prototype = {a: alert('a'), b: function(){}}; new e;",
      "function e(){} e.prototype = {a: alert('a') }; new e;");
}
@Test
public void testPropertiesDefinedInExterns() {
  // Properties whose names appear in the externs (bExtern) must be kept.
  test(
      """
      function e(){}
      e.prototype.a = function(){};
      e.prototype.bExtern = function(){};
      var x = new e;x.a()
      """,
      """
      function e(){}
      e.prototype.a = function(){};
      e.prototype.bExtern = function(){};
      var x = new e; x.a()
      """);
  testSame(
      """
      function e(){}
      e.prototype = {a: function(){}, bExtern: function(){}};
      var x = new e; x.a()
      """);
  testSame(
      """
      class C {
        constructor() {}
        bExtern() {} // property name defined in externs.
      }
      new C();
      """);
}
@Test
public void testAliasing1() {
  // Aliasing a property is not enough for it to count as used
  test(
      """
      function e(){}
      e.prototype.method1 = function(){};
      e.prototype.method2 = function(){};
      // aliases
      e.prototype.alias1 = e.prototype.method1;
      e.prototype.alias2 = e.prototype.method2;
      var x = new e; x.method1()
      """,
      """
      function e(){}
      e.prototype.method1 = function(){};
      var x = new e; x.method1()
      """);
  // Using an alias should keep it
  test(
      """
      function e(){}
      e.prototype.method1 = function(){};
      e.prototype.method2 = function(){};
      // aliases
      e.prototype.alias1 = e.prototype.method1;
      e.prototype.alias2 = e.prototype.method2;
      var x=new e; x.alias1()
      """,
      """
      function e(){}
      e.prototype.method1 = function(){};
      e.prototype.alias1 = e.prototype.method1;
      var x = new e; x.alias1()
      """);
}
@Test
public void testAliasing2() {
  // Aliasing a property is not enough for it to count as used
  test(
      """
      function e(){}
      e.prototype.method1 = function(){};
      // aliases
      e.prototype.alias1 = e.prototype.method1;
      (new e).method1()
      """,
      """
      function e(){}
      e.prototype.method1 = function(){};
      (new e).method1()
      """);
  // Using an alias should keep it
  testSame(
      """
      function e(){}
      e.prototype.method1 = function(){};
      // aliases
      e.prototype.alias1 = e.prototype.method1;
      (new e).alias1()
      """);
}
@Test
public void testAliasing3() {
  // Quoted (computed) aliases are treated as exported, so nothing is removed.
  testSame(
      """
      function e(){}
      e.prototype.method1 = function(){};
      e.prototype.method2 = function(){};
      // aliases
      e.prototype['alias1'] = e.prototype.method1;
      e.prototype['alias2'] = e.prototype.method2;
      new e;
      """);
}
@Test
public void testAliasing4() {
  // The unquoted names are removable, but the quoted alias assignments stay.
  test(
      """
      function e(){}
      e.prototype['alias1'] = e.prototype.method1 = function(){};
      e.prototype['alias2'] = e.prototype.method2 = function(){};
      new e;
      """,
      """
      function e(){}
      e.prototype['alias1'] = function(){};
      e.prototype['alias2'] = function(){};
      new e;
      """);
}
@Test
public void testAliasing5() {
  // An exported alias must preserved any referenced values in the
  // referenced function.
  testSame(
      """
      function e(){}
      e.prototype.method1 = function(){this.method2()};
      e.prototype.method2 = function(){};
      // aliases
      e.prototype['alias1'] = e.prototype.method1;
      new e;
      """);
}
@Test
public void testAliasing6() {
  // An exported alias must preserved any referenced values in the
  // referenced function.
  test(
      """
      function e(){}
      e.prototype.method1 = function(){this.method2()};
      e.prototype.method2 = function(){};
      // aliases
      window['alias1'] = e.prototype.method1;
      """,
      """
      function e(){}
      e.prototype.method1=function(){this.method2()};
      e.prototype.method2=function(){};
      window['alias1']=e.prototype.method1;
      """);
}
@Test
public void testAliasing7() {
  // An exported alias must preserved any referenced values in the
  // referenced function.
  test(
      """
      function e(){}
      e.prototype['alias1'] = e.prototype.method1 = function(){this.method2()};
      e.prototype.method2 = function(){};
      new e;
      """,
      """
      function e(){}
      e.prototype['alias1'] = function(){this.method2()};
      e.prototype.method2 = function(){};
      new e;
      """);
}
@Test
public void testExportedMethodsByNamingConvention() {
  // Names with a leading underscore follow the "exported" naming convention
  // and must be kept, along with anything they alias.
  String classAndItsMethodAliasedAsExtern =
      """
      function Foo() {}
      Foo.prototype.method = function() {};
      // not removed
      Foo.prototype.unused = function() {};
      // removed
      var _externInstance = new Foo();
      Foo.prototype._externMethod = Foo.prototype.method // aliased here
      """;
  String compiled =
      """
      function Foo(){}
      Foo.prototype.method = function(){};
      var _externInstance = new Foo;
      Foo.prototype._externMethod = Foo.prototype.method
      """;
  test(classAndItsMethodAliasedAsExtern, compiled);
}
// NOTE(review): this test is currently byte-identical to
// testExportedMethodsByNamingConvention above — confirm whether it was meant
// to differ (e.g. exercise an "always export" configuration) or can be merged.
@Test
public void testExportedMethodsByNamingConventionAlwaysExported() {
  String classAndItsMethodAliasedAsExtern =
      """
      function Foo() {}
      Foo.prototype.method = function() {};
      // not removed
      Foo.prototype.unused = function() {};
      // removed
      var _externInstance = new Foo();
      Foo.prototype._externMethod = Foo.prototype.method // aliased here
      """;
  String compiled =
      """
      function Foo(){}
      Foo.prototype.method = function(){};
      var _externInstance = new Foo;
      Foo.prototype._externMethod = Foo.prototype.method
      """;
  test(classAndItsMethodAliasedAsExtern, compiled);
}
@Test
public void testExternMethodsFromExternsFile() {
  // externPropName is declared in this test class's EXTERNS, so the property
  // and its aliased target bar_ must survive.
  String classAndItsMethodAliasedAsExtern =
      """
      function Foo() {}
      Foo.prototype.bar_ = function() {};
      // not removed
      Foo.prototype.unused = function() {};
      // removed
      var instance = new Foo;
      Foo.prototype.externPropName = Foo.prototype.bar_ // aliased here
      """;
  String compiled =
      """
      function Foo(){}
      Foo.prototype.bar_ = function(){};
      new Foo;
      Foo.prototype.externPropName = Foo.prototype.bar_
      """;
  test(classAndItsMethodAliasedAsExtern, compiled);
}
@Test
public void testPropertyReferenceGraph() {
  // test a prototype property graph that looks like so:
  // b -> a, c -> b, c -> a, d -> c, e -> a, e -> f
  String constructor = "function Foo() {}";
  String defA = "Foo.prototype.a = function() { Foo.superClass_.a.call(this); };";
  String defB = "Foo.prototype.b = function() { this.a(); };";
  String defC =
      """
      Foo.prototype.c = function() {
        Foo.superClass_.c.call(this); this.b(); this.a(); };
      """;
  String defD = "Foo.prototype.d = function() { this.c(); };";
  String defE = "Foo.prototype.e = function() { this.a(); this.f(); };";
  String defF = "Foo.prototype.f = function() { };";
  String fullClassDef = constructor + defA + defB + defC + defD + defE + defF;
  // ensure that all prototypes are compiled out if none are used
  test(fullClassDef, "");
  // make sure that the right prototypes are called for each use
  String callA = "(new Foo()).a();";
  String callB = "(new Foo()).b();";
  String callC = "(new Foo()).c();";
  String callD = "(new Foo()).d();";
  String callE = "(new Foo()).e();";
  String callF = "(new Foo()).f();";
  // Each call keeps exactly the transitive closure of definitions it reaches.
  test(fullClassDef + callA, constructor + defA + callA);
  test(fullClassDef + callB, constructor + defA + defB + callB);
  test(fullClassDef + callC, constructor + defA + defB + defC + callC);
  test(fullClassDef + callD, constructor + defA + defB + defC + defD + callD);
  test(fullClassDef + callE, constructor + defA + defE + defF + callE);
  test(fullClassDef + callF, constructor + defF + callF);
  test(fullClassDef + callA + callC, constructor + defA + defB + defC + callA + callC);
  test(fullClassDef + callB + callC, constructor + defA + defB + defC + callB + callC);
  test(
      fullClassDef + callA + callB + callC,
      constructor + defA + defB + defC + callA + callB + callC);
}
@Test
public void testPropertiesDefinedWithGetElem() {
testSame("function Foo() {} Foo.prototype['elem'] = function() {}; new Foo;");
testSame("function Foo() {} Foo.prototype[1 + 1] = function() {}; new Foo;");
}
@Test
public void testQuotedProperties() {
// Basic removal for prototype replacement
testSame("function e(){} e.prototype = {'a': function(){}, 'b': function(){}}; new e;");
}
@Test
public void testNeverRemoveImplicitlyUsedProperties() {
testSame(
"""
function Foo() {}
Foo.prototype.length = 3;
Foo.prototype.toString = function() { return 'Foo'; };
Foo.prototype.valueOf = function() { return 'Foo'; };
new Foo;
""");
}
@Test
public void testPropertyDefinedInBranch() {
test("function Foo() {} if (true) Foo.prototype.baz = function() {};", "if (true);");
test("function Foo() {} while (true) Foo.prototype.baz = function() {};", "while (true);");
test("function Foo() {} for (;;) Foo.prototype.baz = function() {};", "for (;;);");
test(
"function Foo() {} do Foo.prototype.baz = function() {}; while(true);", "do; while(true);");
}
  @Test
  public void testUsingAnonymousObjectsToDefeatRemoval() {
    // The same property name appearing on any object literal (or read off an
    // arbitrary object) aliases the prototype property and pins it.
    test("function Foo() {} Foo.prototype.baz = 3; new Foo;", "function Foo() {} new Foo;");
    testSame("function Foo() {} Foo.prototype.baz = 3; new Foo; var x = {}; x.baz;");
    testSame("function Foo() {} Foo.prototype.baz = 3; new Foo; var x = {baz: 5}; x;");
    // quoted properties still prevent removal
    testSame("function Foo() {} Foo.prototype.baz = 3; new Foo; var x = {'baz': 5}; x;");
  }
  @Test
  public void testGlobalFunctionsInGraph() {
    // x and y are reachable only from the unused prototype method, forming a
    // dead reference cycle — the entire program is removed.
    test(
        """
        var x = function() { (new Foo).baz(); };
        var y = function() { x(); };
        function Foo() {}
        Foo.prototype.baz = function() { y(); };
        """,
        "");
  }
  @Test
  public void testGlobalFunctionsInGraph2() {
    // Same dead cycle, but the constructor itself calls this.baz(); nothing
    // outside the cycle instantiates Foo, so everything is still removable.
    test(
        """
        var x = function() { (new Foo).baz(); };
        var y = function() { x(); };
        function Foo() { this.baz(); }
        Foo.prototype.baz = function() { y(); };
        """,
        "");
  }
  @Test
  public void testGlobalFunctionsInGraph3() {
    // Variant where baz closes the cycle through x directly (y is plainly
    // unused); the whole graph is still dead.
    test(
        """
        var x = function() { (new Foo).baz(); };
        var y = function() { x(); };
        function Foo() { this.baz(); }
        Foo.prototype.baz = function() { x(); };
        """,
        "");
  }
  @Test
  public void testGlobalFunctionsInGraph4() {
    // The prototype method is only installed inside the never-invoked
    // constructor, so the entire graph is removable.
    test(
        """
        var x = function() { (new Foo).baz(); };
        var y = function() { x(); };
        function Foo() { Foo.prototype.baz = function() { y(); }; }
        """,
        "");
  }
  @Test
  public void testGlobalFunctionsInGraph5() {
    // With removable globals, methods and the global x are all dead code.
    test(
        """
        function Foo() {}
        Foo.prototype.methodA = function() {};
        function x() { (new Foo).methodA(); }
        Foo.prototype.methodB = function() { x(); };
        """,
        "");
    // With globals pinned, x survives and transitively keeps methodA; only
    // the unreferenced methodB is removed.
    keepGlobals = true;
    test(
        """
        function Foo() {}
        Foo.prototype.methodA = function() {};
        function x() { (new Foo).methodA(); }
        Foo.prototype.methodB = function() { x(); };
        """,
        """
        function Foo() {}
        Foo.prototype.methodA = function() {};
        function x() { (new Foo).methodA(); }
        """);
  }
  @Test
  public void testGlobalFunctionsInGraph6() {
    // methodB is explicitly invoked, which transitively keeps x and methodA
    // alive — nothing can be removed.
    testSame(
        """
        function Foo() {}
        Foo.prototype.methodA = function() {};
        function x() { (new Foo).methodA(); }
        Foo.prototype.methodB = function() { x(); };
        (new Foo).methodB();
        """);
  }
@Test
public void testGlobalFunctionsInGraph7() {
keepGlobals = true;
testSame("function Foo() {} Foo.prototype.methodA = function() {}; this.methodA();");
}
  @Test
  public void testGlobalFunctionsInGraph8() {
    // Same shape as testGlobalFunctionsInGraph4, but declared with let/const
    // to check ES2015 declarations participate in the same analysis.
    test(
        """
        let x = function() { (new Foo).baz(); };
        const y = function() { x(); };
        function Foo() { Foo.prototype.baz = function() { y(); }; }
        """,
        "");
  }
  @Test
  public void testGetterBaseline() {
    keepGlobals = true;
    // Baseline with plain methods (contrast with the getter tests below):
    // the unreferenced methodB is removed from the prototype object literal,
    // while methodA — called from x — stays.
    test(
        """
        function Foo() {}
        Foo.prototype = {
          methodA: function() {},
          methodB: function() { x(); }
        };
        function x() { (new Foo).methodA(); }
        """,
        """
        function Foo() {}
        Foo.prototype = {
          methodA: function() {}
        };
        function x() { (new Foo).methodA(); }
        """);
  }
  @Test
  public void testGetter1() {
    // A getter is triggered by a plain property read: (new Foo).methodA.
    // With removable globals, x itself is dead, so both getters go away.
    test(
        """
        function Foo() {}
        Foo.prototype = {
          get methodA() {},
          get methodB() { x(); }
        };
        function x() { (new Foo).methodA; }
        new Foo();
        """,
        """
        function Foo() {}
        // x() and all methods of Foo removed.
        Foo.prototype = {};
        new Foo();
        """);
    // With globals pinned, x survives; its read of methodA keeps that getter.
    keepGlobals = true;
    test(
        """
        function Foo() {}
        Foo.prototype = {
          get methodA() {},
          get methodB() { x(); }
        };
        function x() { (new Foo).methodA; }
        """,
        """
        function Foo() {}
        Foo.prototype = {
          get methodA() {}
        };
        // x() keeps methodA alive
        function x() { (new Foo).methodA; }
        """);
  }
  @Test
  public void testGetter2() {
    keepGlobals = true;
    // A read of methodA keeps both its getter and the same-named setter;
    // the entirely unreferenced methodB getter/setter pair is removed.
    test(
        """
        function Foo() {}
        Foo.prototype = {
          get methodA() {},
          set methodA(a) {},
          get methodB() { x(); },
          set methodB(a) { x(); }
        };
        function x() { (new Foo).methodA; }
        """,
        """
        function Foo() {}
        Foo.prototype = {
          get methodA() {},
          set methodA(a) {}
        };
        function x() { (new Foo).methodA; }
        """);
  }
  @Test
  public void testHook1() {
    // method1 is assigned via a hook (ternary) but never called; both branch
    // functions and the methods they reference are removable.
    test(
        """
        /** @constructor */ function Foo() {}
        Foo.prototype.method1 =
            Math.random()
                ? function() { this.method2(); }
                : function() { this.method3(); };
        Foo.prototype.method2 = function() {};
        Foo.prototype.method3 = function() {};
        """,
        "");
  }
  @Test
  public void testHook2() {
    // Calling method1 keeps both possible hook branches and, transitively,
    // method2 and method3 — nothing is removable.
    testSame(
        """
        /** @constructor */ function Foo() {}
        Foo.prototype.method1 =
            Math.random()
                ? function() { this.method2(); }
                : function() { this.method3(); };
        Foo.prototype.method2 = function() {};
        Foo.prototype.method3 = function() {};
        (new Foo()).method1();
        """);
  }
  @Test
  public void testDestructuringProperty() {
    // Makes the cases below shorter because we don't have to add references
    // to globals to keep them around and just test prototype property removal.
    keepGlobals = true;
    // An empty pattern reads no properties, so unused methods are removed.
    test(
        "function Foo() {} Foo.prototype.a = function() {}; var {} = new Foo();",
        "function Foo() {} var {} = new Foo();");
    // `var {a} = ...` counts as a read of property a; b stays unreferenced.
    test(
        """
        function Foo() {}
        Foo.prototype.a = function() {};
        Foo.prototype.b = function() {}
        var {a} = new Foo();
        """,
        """
        function Foo() {}
        Foo.prototype.a = function() {};
        var {a} = new Foo();
        """);
    // Renaming the binding (`{a:x}`) still reads property a.
    test(
        """
        function Foo() {}
        Foo.prototype.a = function() {};
        Foo.prototype.b = function() {}
        var {a:x} = new Foo();
        """,
        """
        function Foo() {}
        Foo.prototype.a = function() {};
        var {a:x} = new Foo();
        """);
    // Reading both properties pins both, with or without renamed bindings.
    testSame(
        """
        function Foo() {}
        Foo.prototype.a = function() {};
        Foo.prototype.b = function() {}
        var {a, b} = new Foo();
        """);
    testSame(
        """
        function Foo() {}
        Foo.prototype.a = function() {};
        Foo.prototype.b = function() {}
        var {a:x, b:y} = new Foo();
        """);
    // Destructuring *assignment* (not a declaration) also counts as a read.
    testSame(
        """
        function Foo() {} // preserve newlines
        Foo.prototype.a = function() {};
        let x;
        ({a:x} = new Foo());
        """);
    // Destructuring in a function parameter reads the property too.
    testSame(
        """
        function Foo() {}
        Foo.prototype.a = function() {};
        function f({a:x}) { x; }; f(new Foo());
        """);
    // A default value does not change that the property is read.
    testSame(
        """
        function Foo() {}
        Foo.prototype.a = function() {};
        var {a : x = 3} = new Foo();
        """);
    test(
        """
        function Foo() {}
        Foo.prototype.a = function() {};
        Foo.prototype.b = function() {}
        var {a : a = 3} = new Foo();
        """,
        """
        function Foo() {}
        Foo.prototype.a = function() {};
        var {a : a = 3} = new Foo();
        """);
    // Nested array/object patterns still read the outermost property a.
    testSame(
        """
        function Foo() {}
        Foo.prototype.a = function() {};
        let { a : [b, c, d] } = new Foo();
        """);
    testSame(
        """
        function Foo() {}
        Foo.prototype.a = function() {};
        const { a : { b : { c : d = '' }}} = new Foo();
        """);
  }
  @Test
  public void testDestructuringRest() {
    // Makes the cases below shorter because we don't have to add references
    // to globals to keep them around and just test prototype property removal.
    keepGlobals = true;
    // The object-rest assignment target reads `a` off the new instance, so
    // the prototype property must be kept.
    testSame(
        """
        function Foo() {}
        Foo.prototype.a = function() {};
        ({ ...new Foo().a.b } = 0);
        """);
  }
  @Test
  public void testOptionalGetPropPreventsRemoval() {
    // Optional-chaining reads (`a?.b` and `a.b?.()`) count as references:
    // both optionally accessed methods are kept, the unreferenced one is not.
    test(
        """
        class C {
          constructor() {
            this.x = 1;
          }
          optChainGetPropRef() {}
          optChainCallRef() {}
          unreferenced() {}
        }
        var c = new C;
        c?.optChainGetPropRef()
        c.optChainCallRef?.()
        // no call to unreferenced()
        """,
        """
        class C {
          constructor() {
            this.x = 1;
          }
          optChainGetPropRef() {} // kept
          optChainCallRef() {} // kept
          // unreferenced() removed
        }
        var c = new C;
        c?.optChainGetPropRef()
        c.optChainCallRef?.()
        // no call to unreferenced()
        """);
  }
  @Test
  public void testEs6Class() {
    // The constructor of an instantiated class is never removable.
    testSame(
        """
        class C {
          constructor() { // constructor is not removable
            this.x = 1;
          }
        }
        new C();
        """);
    // An unreferenced instance method is removed.
    test(
        """
        class C {
          constructor() {
            this.x = 1;
          }
          foo() {}
        }
        var c = new C
        """,
        """
        class C {
          constructor() { // constructor is not removable
            this.x = 1;
          }
        }
        new C();
        """);
    // A called method is kept.
    testSame(
        """
        class C {
          constructor() {
            this.x = 1;
          }
          foo() {}
        }
        var c = new C
        c.foo()
        """);
    // Static methods are currently never removed (see TODO in expected output).
    test(
        """
        class C {
          constructor() {
            this.x = 1;
          }
          static foo() {}
        }
        new C;
        """,
        """
        class C {
          constructor() { // constructor is not removable
            this.x = 1;
          }
          // TODO(b/139319709): Remove this. static method removal is disabled.
          static foo() {}
        }
        new C();
        """);
    // An unreferenced getter/setter pair is removed together.
    test(
        """
        class C {
          constructor() {
            this.x = 1;
          }
          get foo() {}
          set foo(val) {}
        }
        var c = new C
        """,
        "class C { constructor() { this.x = 1; } } new C");
    // Writing the property keeps the accessor pair...
    testSame(
        """
        class C {
          constructor() {
            this.x = 1;
          }
          get foo() {}
          set foo(val) {}
        }
        var c = new C;
        c.foo = 3;
        """);
    // ...and so does reading it.
    testSame(
        """
        class C {
          constructor() {
            this.x = 1;
          }
          get foo() {}
          set foo(val) {}
        }
        var c = new C;
        c.foo;
        """);
  }
  @Test
  public void testEs6Extends() {
    // Instantiating the subclass keeps both constructors.
    testSame(
        """
        class C {
          constructor() {
            this.x = 1;
          }
        }
        class D extends C {
          constructor() {}
        }
        new D();
        """);
    // A call to the override keeps it and — via super.foo() — the base method.
    testSame(
        """
        class C {
          constructor() {
            this.x = 1;
          }
          foo() {}
        }
        class D extends C {
          constructor() {}
          foo() {
            return super.foo()
          }
        }
        var d = new D
        d.foo()
        """);
    // With no call, the override and the base implementation are both removed.
    test(
        """
        class C {
          constructor() {
            this.x = 1;
          }
          foo() {}
        }
        class D extends C {
          constructor() {}
          foo() {
            return super.foo()
          }
        }
        var d = new D;
        """,
        """
        class C {
          constructor() {
            this.x = 1;
          }
        }
        class D extends C {
          constructor() {}
        }
        new D;
        """);
  }
  @Test
  public void testAnonClasses() {
    // Make sure class expression names are removed.
    keepLocals = false;
    // The inner name InnerC and the unused foo() are both removed.
    test(
        """
        var C = class InnerC {
          constructor() {
            this.x = 1;
          }
          foo() {}
        };
        new C;
        """,
        "var C = class { constructor() { this.x = 1; } }; new C;");
    // A called method keeps the class expression intact.
    testSame(
        """
        var C = class {
          constructor() {
            this.x = 1;
          }
          foo() {}
        }
        var c = new C()
        c.foo()
        """);
    // Class expressions assigned to a property behave the same way.
    test(
        """
        var C = class {}
        C.D = class {
          constructor() {
            this.x = 1;
          }
          foo() {}
        }
        new C.D();
        """,
        """
        var C = class {}
        C.D = class{
          constructor() {
            this.x = 1;
          }
        }
        new C.D();
        """);
    // Passing a class to an extern keeps its members but drops the inner name.
    test(
        "externFunction(class C { constructor() { } externPropName() { } })",
        "externFunction(class { constructor() { } externPropName() { } })");
  }
  @Test
  public void testBaseClassExpressionHasSideEffects() {
    // Make sure names are removed from class expressions.
    keepLocals = false;
    // The extends clause calls getBaseClass(), so the class expression itself
    // must be retained for its side effect even when its binding is removed.
    testSame(
        """
        function getBaseClass() { return class {}; }
        class C extends getBaseClass() {}
        """);
    test(
        """
        function getBaseClass() { return class {}; }
        const C = class InnerC extends getBaseClass() {};
        """,
        """
        function getBaseClass() { return class {}; }
        (class extends getBaseClass() {})
        """);
    test(
        """
        function getBaseClass() { return class {}; }
        let C;
        C = class InnerC extends getBaseClass() {}
        """,
        """
        function getBaseClass() { return class {}; }
        (class extends getBaseClass() {})
        """);
    test(
        """
        function getBaseClass() { return class {}; }
        externFunction(class InnerC extends getBaseClass() {})
        """,
        """
        function getBaseClass() { return class {}; }
        externFunction(class extends getBaseClass() {})
        """);
  }
@Test
public void testModules() {
testSame("export default function(){}");
testSame("export class C {};");
testSame("class Bar {} export {Bar}");
testSame("import { square, diag } from '/lib';");
testSame("import * as lib from '/lib';");
}
  @Test
  public void testReflection_reflectProperty_pinsReflectedName() {
    // goog.reflect.objectProperty('handle') reflects over the property name,
    // which pins the otherwise-unreferenced prototype method.
    testSame(
        """
        /** @constructor */
        function Foo() {}
        Foo.prototype.handle = function(x, y) { alert(y); };
        goog.reflect.objectProperty('handle');
        alert(new Foo());
        """);
  }
  @Test
  public void testReflection_reflectProperty_onlyPinsReflectedName() {
    // Only the exact reflected name is pinned; 'handle' does not match
    // 'not_handle' and is therefore removed.
    test(
        """
        /** @constructor */
        function Foo() {}
        Foo.prototype.handle = function(x, y) { alert(y); };
        goog.reflect.objectProperty('not_handle');
        alert(new Foo());
        """,
        """
        /** @constructor */
        function Foo() {}
        goog.reflect.objectProperty('not_handle');
        alert(new Foo());
        """);
  }
  @Test
  public void testReflection_reflectProperty_onlyPinsReflectedName_whenNameMissing() {
    // objectProperty() with no argument pins nothing, so the unreferenced
    // handle method is still removable.
    test(
        """
        /** @constructor */
        function Foo() {}
        Foo.prototype.handle = function(x, y) { alert(y); };
        goog.reflect.objectProperty();
        alert(new Foo());
        """,
        """
        /** @constructor */
        function Foo() {}
        goog.reflect.objectProperty();
        alert(new Foo());
        """);
  }
  @Test
  public void testPureOrBreakMyCode() {
    // @pureOrBreakMyCode asserts the assigned expression (here alert()) is
    // safe to remove if unused: the unused property assignment is dropped
    // while the one read through foo() is kept.
    test(
        """
        /** @constructor */
        function Foo() {}
        Foo.prototype.used = /** @pureOrBreakMyCode */(alert());
        Foo.prototype.unused = /** @pureOrBreakMyCode */(alert());
        function foo() {
          return new Foo().used;
        }
        foo();
        """,
        """
        /** @constructor */
        function Foo() {}
        Foo.prototype.used = /** @pureOrBreakMyCode */(alert());
        function foo() {
          return new Foo().used;
        }
        foo();
        """);
  }
}
|
googleapis/google-cloud-java | 35,056 | java-vmwareengine/proto-google-cloud-vmwareengine-v1/src/main/java/com/google/cloud/vmwareengine/v1/DeleteExternalAccessRuleRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/vmwareengine/v1/vmwareengine.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.vmwareengine.v1;
/**
*
*
* <pre>
* Request message for
* [VmwareEngine.DeleteExternalAccessRule][google.cloud.vmwareengine.v1.VmwareEngine.DeleteExternalAccessRule]
* </pre>
*
* Protobuf type {@code google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest}
*/
public final class DeleteExternalAccessRuleRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest)
DeleteExternalAccessRuleRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use DeleteExternalAccessRuleRequest.newBuilder() to construct.
  // NOTE(review): protoc-generated code (DO NOT EDIT) — comments only.
  // Both constructors are private: the Builder is the only construction path.
  private DeleteExternalAccessRuleRequest(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Initializes string fields to the proto3 default (empty string).
  private DeleteExternalAccessRuleRequest() {
    name_ = "";
    requestId_ = "";
  }

  // Invoked reflectively by the protobuf runtime to create fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new DeleteExternalAccessRuleRequest();
  }
  // Descriptor plumbing: binds this message class to its entry in the
  // vmwareengine.proto descriptor and reflective field-accessor tables.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.vmwareengine.v1.VmwareengineProto
        .internal_static_google_cloud_vmwareengine_v1_DeleteExternalAccessRuleRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.vmwareengine.v1.VmwareengineProto
        .internal_static_google_cloud_vmwareengine_v1_DeleteExternalAccessRuleRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest.class,
            com.google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest.Builder.class);
  }
  public static final int NAME_FIELD_NUMBER = 1;

  // Holds either a String or a ByteString; each accessor lazily converts and
  // caches the representation it returns (standard protobuf string pattern).
  @SuppressWarnings("serial")
  private volatile java.lang.Object name_ = "";

  /**
   *
   *
   * <pre>
   * Required. The resource name of the external access firewall rule to delete.
   * Resource names are schemeless URIs that follow the conventions in
   * https://cloud.google.com/apis/design/resource_names.
   * For example:
   * `projects/my-project/locations/us-central1/networkPolicies/my-policy/externalAccessRules/my-rule`
   * </pre>
   *
   * <code>
   * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The name.
   */
  @java.lang.Override
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode once and cache the String back into name_.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      name_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. The resource name of the external access firewall rule to delete.
   * Resource names are schemeless URIs that follow the conventions in
   * https://cloud.google.com/apis/design/resource_names.
   * For example:
   * `projects/my-project/locations/us-central1/networkPolicies/my-policy/externalAccessRules/my-rule`
   * </pre>
   *
   * <code>
   * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for name.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      // Encode once and cache the ByteString back into name_.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      name_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int REQUEST_ID_FIELD_NUMBER = 2;

  // Same lazy String/ByteString caching scheme as name_.
  @SuppressWarnings("serial")
  private volatile java.lang.Object requestId_ = "";

  /**
   *
   *
   * <pre>
   * Optional. A request ID to identify requests. Specify a unique request ID
   * so that if you must retry your request, the server will know to ignore
   * the request if it has already been completed. The server guarantees that a
   * request doesn't result in creation of duplicate commitments for at least 60
   * minutes.
   *
   * For example, consider a situation where you make an initial request and the
   * request times out. If you make the request again with the same request
   * ID, the server can check if the original operation with the same request ID
   * was received, and if so, will ignore the second request. This prevents
   * clients from accidentally creating duplicate commitments.
   *
   * The request ID must be a valid UUID with the exception that zero UUID is
   * not supported (00000000-0000-0000-0000-000000000000).
   * </pre>
   *
   * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The requestId.
   */
  @java.lang.Override
  public java.lang.String getRequestId() {
    java.lang.Object ref = requestId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode once and cache the String back into requestId_.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      requestId_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Optional. A request ID to identify requests. Specify a unique request ID
   * so that if you must retry your request, the server will know to ignore
   * the request if it has already been completed. The server guarantees that a
   * request doesn't result in creation of duplicate commitments for at least 60
   * minutes.
   *
   * For example, consider a situation where you make an initial request and the
   * request times out. If you make the request again with the same request
   * ID, the server can check if the original operation with the same request ID
   * was received, and if so, will ignore the second request. This prevents
   * clients from accidentally creating duplicate commitments.
   *
   * The request ID must be a valid UUID with the exception that zero UUID is
   * not supported (00000000-0000-0000-0000-000000000000).
   * </pre>
   *
   * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for requestId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getRequestIdBytes() {
    java.lang.Object ref = requestId_;
    if (ref instanceof java.lang.String) {
      // Encode once and cache the ByteString back into requestId_.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      requestId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // -1 = not computed yet, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;

  // Proto3 message with no required fields: always initialized; result memoized.
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Wire serialization: proto3 default elision — string fields 1 (name) and
  // 2 (request_id) are written only when non-empty; unknown fields follow.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, requestId_);
    }
    getUnknownFields().writeTo(output);
  }

  // Mirrors writeTo: sums sizes of the same fields; memoizedSize caches the
  // result (-1 means not computed yet).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, requestId_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over name, requestId, and the unknown field set.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest other =
        (com.google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest) obj;
    if (!getName().equals(other.getName())) return false;
    if (!getRequestId().equals(other.getRequestId())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Consistent with equals(); memoizedHashCode of 0 means "not computed yet".
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER;
    hash = (53 * hash) + getRequestId().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points for every supported input type
  // (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with
  // an optional ExtensionRegistryLite overload. All delegate to PARSER.
  public static com.google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a varint length prefix before the message bytes.
  public static com.google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory plumbing: new builders, prototype-seeded builders, and
  // toBuilder() (which avoids a redundant merge for the default instance).
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Request message for
* [VmwareEngine.DeleteExternalAccessRule][google.cloud.vmwareengine.v1.VmwareEngine.DeleteExternalAccessRule]
* </pre>
*
* Protobuf type {@code google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest)
com.google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequestOrBuilder {
    // Builder-side descriptor plumbing (mirrors the message class).
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.vmwareengine.v1.VmwareengineProto
          .internal_static_google_cloud_vmwareengine_v1_DeleteExternalAccessRuleRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.vmwareengine.v1.VmwareengineProto
          .internal_static_google_cloud_vmwareengine_v1_DeleteExternalAccessRuleRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest.class,
              com.google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest.Builder.class);
    }

    // Construct using com.google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets both fields to their proto3 defaults and clears the has-bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      name_ = "";
      requestId_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.vmwareengine.v1.VmwareengineProto
          .internal_static_google_cloud_vmwareengine_v1_DeleteExternalAccessRuleRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest
        getDefaultInstanceForType() {
      return com.google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest.getDefaultInstance();
    }

    // build() enforces the isInitialized() contract (trivially true for proto3).
    @java.lang.Override
    public com.google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest build() {
      com.google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest buildPartial() {
      com.google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest result =
          new com.google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose has-bits are set (0x1 = name, 0x2 = requestId).
    private void buildPartial0(
        com.google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.name_ = name_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.requestId_ = requestId_;
      }
    }
    // Generated covariant overrides: these only narrow the return type to
    // Builder and delegate straight to the superclass implementations.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Dynamic dispatch: use the typed merge when possible, otherwise fall back
    // to the reflective superclass merge.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest) {
        return mergeFrom((com.google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Proto3 merge semantics: non-empty scalar fields from `other` overwrite
    // this builder's values; unknown fields are merged in.
    public Builder mergeFrom(
        com.google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest other) {
      if (other
          == com.google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest.getDefaultInstance())
        return this;
      if (!other.getName().isEmpty()) {
        name_ = other.name_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getRequestId().isEmpty()) {
        requestId_ = other.requestId_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Streaming parse loop: dispatches on wire tags until tag 0 (EOF) or an
    // end-group tag; unknown tags are preserved via parseUnknownField.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            // tag 10 = (field 1 << 3) | LENGTH_DELIMITED: name
            case 10:
              {
                name_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            // tag 18 = (field 2 << 3) | LENGTH_DELIMITED: request_id
            case 18:
              {
                requestId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Always notify parents of (possibly partial) mutation.
        onChanged();
      } // finally
      return this;
    }
    // Has-bits for the builder's fields (0x1 = name, 0x2 = requestId).
    private int bitField0_;

    // Like the message field: lazily converted String/ByteString cache.
    private java.lang.Object name_ = "";

    /**
     *
     *
     * <pre>
     * Required. The resource name of the external access firewall rule to delete.
     * Resource names are schemeless URIs that follow the conventions in
     * https://cloud.google.com/apis/design/resource_names.
     * For example:
     * `projects/my-project/locations/us-central1/networkPolicies/my-policy/externalAccessRules/my-rule`
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The name.
     */
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        name_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The resource name of the external access firewall rule to delete.
     * Resource names are schemeless URIs that follow the conventions in
     * https://cloud.google.com/apis/design/resource_names.
     * For example:
     * `projects/my-project/locations/us-central1/networkPolicies/my-policy/externalAccessRules/my-rule`
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for name.
     */
    public com.google.protobuf.ByteString getNameBytes() {
      java.lang.Object ref = name_;
      // NOTE(review): generated code uses unqualified `String` here (elsewhere
      // `java.lang.String`); equivalent since java.lang is always in scope.
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        name_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The resource name of the external access firewall rule to delete.
     * Resource names are schemeless URIs that follow the conventions in
     * https://cloud.google.com/apis/design/resource_names.
     * For example:
     * `projects/my-project/locations/us-central1/networkPolicies/my-policy/externalAccessRules/my-rule`
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The name to set.
     * @return This builder for chaining.
     */
    public Builder setName(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      name_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The resource name of the external access firewall rule to delete.
     * Resource names are schemeless URIs that follow the conventions in
     * https://cloud.google.com/apis/design/resource_names.
     * For example:
     * `projects/my-project/locations/us-central1/networkPolicies/my-policy/externalAccessRules/my-rule`
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearName() {
      name_ = getDefaultInstance().getName();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The resource name of the external access firewall rule to delete.
     * Resource names are schemeless URIs that follow the conventions in
     * https://cloud.google.com/apis/design/resource_names.
     * For example:
     * `projects/my-project/locations/us-central1/networkPolicies/my-policy/externalAccessRules/my-rule`
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for name to set.
     * @return This builder for chaining.
     */
    public Builder setNameBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Raw bytes must still be valid UTF-8 for a proto3 string field.
      checkByteStringIsUtf8(value);
      name_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
private java.lang.Object requestId_ = "";
/**
*
*
* <pre>
* Optional. A request ID to identify requests. Specify a unique request ID
* so that if you must retry your request, the server will know to ignore
* the request if it has already been completed. The server guarantees that a
* request doesn't result in creation of duplicate commitments for at least 60
* minutes.
*
* For example, consider a situation where you make an initial request and the
* request times out. If you make the request again with the same request
* ID, the server can check if the original operation with the same request ID
* was received, and if so, will ignore the second request. This prevents
* clients from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The requestId.
*/
public java.lang.String getRequestId() {
java.lang.Object ref = requestId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
requestId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. A request ID to identify requests. Specify a unique request ID
* so that if you must retry your request, the server will know to ignore
* the request if it has already been completed. The server guarantees that a
* request doesn't result in creation of duplicate commitments for at least 60
* minutes.
*
* For example, consider a situation where you make an initial request and the
* request times out. If you make the request again with the same request
* ID, the server can check if the original operation with the same request ID
* was received, and if so, will ignore the second request. This prevents
* clients from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for requestId.
*/
public com.google.protobuf.ByteString getRequestIdBytes() {
java.lang.Object ref = requestId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
requestId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. A request ID to identify requests. Specify a unique request ID
* so that if you must retry your request, the server will know to ignore
* the request if it has already been completed. The server guarantees that a
* request doesn't result in creation of duplicate commitments for at least 60
* minutes.
*
* For example, consider a situation where you make an initial request and the
* request times out. If you make the request again with the same request
* ID, the server can check if the original operation with the same request ID
* was received, and if so, will ignore the second request. This prevents
* clients from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The requestId to set.
* @return This builder for chaining.
*/
public Builder setRequestId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
requestId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A request ID to identify requests. Specify a unique request ID
* so that if you must retry your request, the server will know to ignore
* the request if it has already been completed. The server guarantees that a
* request doesn't result in creation of duplicate commitments for at least 60
* minutes.
*
* For example, consider a situation where you make an initial request and the
* request times out. If you make the request again with the same request
* ID, the server can check if the original operation with the same request ID
* was received, and if so, will ignore the second request. This prevents
* clients from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearRequestId() {
requestId_ = getDefaultInstance().getRequestId();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A request ID to identify requests. Specify a unique request ID
* so that if you must retry your request, the server will know to ignore
* the request if it has already been completed. The server guarantees that a
* request doesn't result in creation of duplicate commitments for at least 60
* minutes.
*
* For example, consider a situation where you make an initial request and the
* request times out. If you make the request again with the same request
* ID, the server can check if the original operation with the same request ID
* was received, and if so, will ignore the second request. This prevents
* clients from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for requestId to set.
* @return This builder for chaining.
*/
public Builder setRequestIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
requestId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest)
// Eagerly-created singleton default instance; all unset message references
// resolve to this object.
private static final com.google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest
    DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest();
}
public static com.google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest
    getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Parser delegates to the builder's mergeFrom and, on failure, attaches the
// partially-built message so callers can inspect what was read.
private static final com.google.protobuf.Parser<DeleteExternalAccessRuleRequest> PARSER =
    new com.google.protobuf.AbstractParser<DeleteExternalAccessRuleRequest>() {
      @java.lang.Override
      public DeleteExternalAccessRuleRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O errors in the protobuf exception type expected by callers.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
public static com.google.protobuf.Parser<DeleteExternalAccessRuleRequest> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<DeleteExternalAccessRuleRequest> getParserForType() {
  return PARSER;
}
@java.lang.Override
public com.google.cloud.vmwareengine.v1.DeleteExternalAccessRuleRequest
    getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v21/resources/campaign_search_term_view.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v21.resources;
/**
* <pre>
* This report provides granular performance data, including cost metrics, for
* each individual search term that triggered your ads.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.resources.CampaignSearchTermView}
*/
public final class CampaignSearchTermView extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v21.resources.CampaignSearchTermView)
CampaignSearchTermViewOrBuilder {
private static final long serialVersionUID = 0L;
// Use CampaignSearchTermView.newBuilder() to construct.
private CampaignSearchTermView(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
private CampaignSearchTermView() {
  // Initialize string fields to the proto3 default (empty string).
  resourceName_ = "";
  searchTerm_ = "";
  campaign_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
    UnusedPrivateParameter unused) {
  return new CampaignSearchTermView();
}
public static final com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return com.google.ads.googleads.v21.resources.CampaignSearchTermViewProto.internal_static_google_ads_googleads_v21_resources_CampaignSearchTermView_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.ads.googleads.v21.resources.CampaignSearchTermViewProto.internal_static_google_ads_googleads_v21_resources_CampaignSearchTermView_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.ads.googleads.v21.resources.CampaignSearchTermView.class, com.google.ads.googleads.v21.resources.CampaignSearchTermView.Builder.class);
}
// Presence bits: 0x1 = search_term set, 0x2 = campaign set (both proto3 optional).
private int bitField0_;
public static final int RESOURCE_NAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object resourceName_ = "";
/**
 * <pre>
 * Output only. The resource name of the campaign search term view.
 * Campaign search term view resource names have the form:
 *
 * `customers/{customer_id}/campaignSearchTermViews/{campaign_id}~{URL-base64_search_term}`
 * </pre>
 *
 * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
 * @return The resourceName.
 */
@java.lang.Override
public java.lang.String getResourceName() {
  java.lang.Object ref = resourceName_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Decode the parse-time ByteString once and cache the String form.
    com.google.protobuf.ByteString bs =
        (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    resourceName_ = s;
    return s;
  }
}
/**
 * <pre>
 * Output only. The resource name of the campaign search term view.
 * Campaign search term view resource names have the form:
 *
 * `customers/{customer_id}/campaignSearchTermViews/{campaign_id}~{URL-base64_search_term}`
 * </pre>
 *
 * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
 * @return The bytes for resourceName.
 */
@java.lang.Override
public com.google.protobuf.ByteString
    getResourceNameBytes() {
  java.lang.Object ref = resourceName_;
  if (ref instanceof java.lang.String) {
    // Encode once and cache the ByteString form for later byte accesses.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    resourceName_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int SEARCH_TERM_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object searchTerm_ = "";
/**
 * <pre>
 * Output only. The search term.
 * </pre>
 *
 * <code>optional string search_term = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 * @return Whether the searchTerm field is set.
 */
@java.lang.Override
public boolean hasSearchTerm() {
  return ((bitField0_ & 0x00000001) != 0);
}
/**
 * <pre>
 * Output only. The search term.
 * </pre>
 *
 * <code>optional string search_term = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 * @return The searchTerm.
 */
@java.lang.Override
public java.lang.String getSearchTerm() {
  java.lang.Object ref = searchTerm_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs =
        (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    searchTerm_ = s;
    return s;
  }
}
/**
 * <pre>
 * Output only. The search term.
 * </pre>
 *
 * <code>optional string search_term = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 * @return The bytes for searchTerm.
 */
@java.lang.Override
public com.google.protobuf.ByteString
    getSearchTermBytes() {
  java.lang.Object ref = searchTerm_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    searchTerm_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int CAMPAIGN_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object campaign_ = "";
/**
 * <pre>
 * Output only. The campaign the search term served in.
 * </pre>
 *
 * <code>optional string campaign = 3 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
 * @return Whether the campaign field is set.
 */
@java.lang.Override
public boolean hasCampaign() {
  return ((bitField0_ & 0x00000002) != 0);
}
/**
 * <pre>
 * Output only. The campaign the search term served in.
 * </pre>
 *
 * <code>optional string campaign = 3 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
 * @return The campaign.
 */
@java.lang.Override
public java.lang.String getCampaign() {
  java.lang.Object ref = campaign_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs =
        (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    campaign_ = s;
    return s;
  }
}
/**
 * <pre>
 * Output only. The campaign the search term served in.
 * </pre>
 *
 * <code>optional string campaign = 3 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
 * @return The bytes for campaign.
 */
@java.lang.Override
public com.google.protobuf.ByteString
    getCampaignBytes() {
  java.lang.Object ref = campaign_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    campaign_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  // resource_name is written when non-empty; the optional fields are written
  // only when their presence bit is set.
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, resourceName_);
  }
  if (((bitField0_ & 0x00000001) != 0)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, searchTerm_);
  }
  if (((bitField0_ & 0x00000002) != 0)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 3, campaign_);
  }
  getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
  // Size computation mirrors writeTo() and is memoized after the first call.
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, resourceName_);
  }
  if (((bitField0_ & 0x00000001) != 0)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, searchTerm_);
  }
  if (((bitField0_ & 0x00000002) != 0)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, campaign_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.ads.googleads.v21.resources.CampaignSearchTermView)) {
    return super.equals(obj);
  }
  com.google.ads.googleads.v21.resources.CampaignSearchTermView other = (com.google.ads.googleads.v21.resources.CampaignSearchTermView) obj;
  if (!getResourceName()
      .equals(other.getResourceName())) return false;
  // Optional fields compare presence first, then value only when both present.
  if (hasSearchTerm() != other.hasSearchTerm()) return false;
  if (hasSearchTerm()) {
    if (!getSearchTerm()
        .equals(other.getSearchTerm())) return false;
  }
  if (hasCampaign() != other.hasCampaign()) return false;
  if (hasCampaign()) {
    if (!getCampaign()
        .equals(other.getCampaign())) return false;
  }
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + RESOURCE_NAME_FIELD_NUMBER;
  hash = (53 * hash) + getResourceName().hashCode();
  // Optional fields contribute to the hash only when present, matching equals().
  if (hasSearchTerm()) {
    hash = (37 * hash) + SEARCH_TERM_FIELD_NUMBER;
    hash = (53 * hash) + getSearchTerm().hashCode();
  }
  if (hasCampaign()) {
    hash = (37 * hash) + CAMPAIGN_FIELD_NUMBER;
    hash = (53 * hash) + getCampaign().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// ---- Standard protoc-generated parse entry points ----
public static com.google.ads.googleads.v21.resources.CampaignSearchTermView parseFrom(
    java.nio.ByteBuffer data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.resources.CampaignSearchTermView parseFrom(
    java.nio.ByteBuffer data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.resources.CampaignSearchTermView parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.resources.CampaignSearchTermView parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.resources.CampaignSearchTermView parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.resources.CampaignSearchTermView parseFrom(
    byte[] data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.resources.CampaignSearchTermView parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.resources.CampaignSearchTermView parseFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v21.resources.CampaignSearchTermView parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.resources.CampaignSearchTermView parseDelimitedFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v21.resources.CampaignSearchTermView parseFrom(
    com.google.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.resources.CampaignSearchTermView parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
// ---- Builder factories ----
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v21.resources.CampaignSearchTermView prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
  // Avoid a needless mergeFrom when building from the default instance.
  return this == DEFAULT_INSTANCE
      ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
    com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
* <pre>
* This report provides granular performance data, including cost metrics, for
* each individual search term that triggered your ads.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.resources.CampaignSearchTermView}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.resources.CampaignSearchTermView)
com.google.ads.googleads.v21.resources.CampaignSearchTermViewOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v21.resources.CampaignSearchTermViewProto.internal_static_google_ads_googleads_v21_resources_CampaignSearchTermView_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v21.resources.CampaignSearchTermViewProto.internal_static_google_ads_googleads_v21_resources_CampaignSearchTermView_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v21.resources.CampaignSearchTermView.class, com.google.ads.googleads.v21.resources.CampaignSearchTermView.Builder.class);
}
// Construct using com.google.ads.googleads.v21.resources.CampaignSearchTermView.newBuilder()
private Builder() {
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
resourceName_ = "";
searchTerm_ = "";
campaign_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v21.resources.CampaignSearchTermViewProto.internal_static_google_ads_googleads_v21_resources_CampaignSearchTermView_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v21.resources.CampaignSearchTermView getDefaultInstanceForType() {
return com.google.ads.googleads.v21.resources.CampaignSearchTermView.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v21.resources.CampaignSearchTermView build() {
com.google.ads.googleads.v21.resources.CampaignSearchTermView result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v21.resources.CampaignSearchTermView buildPartial() {
com.google.ads.googleads.v21.resources.CampaignSearchTermView result = new com.google.ads.googleads.v21.resources.CampaignSearchTermView(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(com.google.ads.googleads.v21.resources.CampaignSearchTermView result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.resourceName_ = resourceName_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.searchTerm_ = searchTerm_;
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.campaign_ = campaign_;
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v21.resources.CampaignSearchTermView) {
return mergeFrom((com.google.ads.googleads.v21.resources.CampaignSearchTermView)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.ads.googleads.v21.resources.CampaignSearchTermView other) {
if (other == com.google.ads.googleads.v21.resources.CampaignSearchTermView.getDefaultInstance()) return this;
if (!other.getResourceName().isEmpty()) {
resourceName_ = other.resourceName_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.hasSearchTerm()) {
searchTerm_ = other.searchTerm_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.hasCampaign()) {
campaign_ = other.campaign_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
resourceName_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18: {
searchTerm_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 26: {
campaign_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object resourceName_ = "";
/**
* <pre>
* Output only. The resource name of the campaign search term view.
* Campaign search term view resource names have the form:
*
* `customers/{customer_id}/campaignSearchTermViews/{campaign_id}~{URL-base64_search_term}`
* </pre>
*
* <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
* @return The resourceName.
*/
public java.lang.String getResourceName() {
java.lang.Object ref = resourceName_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
resourceName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
    // resourceName accessors: the field is stored as Object so it can lazily
    // hold either a String or its cached UTF-8 ByteString encoding.
    /**
     * <pre>
     * Output only. The resource name of the campaign search term view.
     * Campaign search term view resource names have the form:
     *
     * `customers/{customer_id}/campaignSearchTermViews/{campaign_id}~{URL-base64_search_term}`
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @return The bytes for resourceName.
     */
    public com.google.protobuf.ByteString
        getResourceNameBytes() {
      java.lang.Object ref = resourceName_;
      if (ref instanceof String) {
        // First bytes access: encode once and cache the ByteString in place.
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        resourceName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Output only. The resource name of the campaign search term view.
     * Campaign search term view resource names have the form:
     *
     * `customers/{customer_id}/campaignSearchTermViews/{campaign_id}~{URL-base64_search_term}`
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @param value The resourceName to set.
     * @return This builder for chaining.
     */
    public Builder setResourceName(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      resourceName_ = value;
      // Bit 0x00000001 of bitField0_ tracks presence of resource_name.
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Output only. The resource name of the campaign search term view.
     * Campaign search term view resource names have the form:
     *
     * `customers/{customer_id}/campaignSearchTermViews/{campaign_id}~{URL-base64_search_term}`
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @return This builder for chaining.
     */
    public Builder clearResourceName() {
      // Reset to the default instance's value and drop the presence bit.
      resourceName_ = getDefaultInstance().getResourceName();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Output only. The resource name of the campaign search term view.
     * Campaign search term view resource names have the form:
     *
     * `customers/{customer_id}/campaignSearchTermViews/{campaign_id}~{URL-base64_search_term}`
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @param value The bytes for resourceName to set.
     * @return This builder for chaining.
     */
    public Builder setResourceNameBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      // proto3 strings must be valid UTF-8; reject invalid bytes eagerly.
      checkByteStringIsUtf8(value);
      resourceName_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    // searchTerm (field 2, proto3 optional): stored as Object for the lazy
    // String/ByteString dual representation; presence bit is 0x00000002.
    private java.lang.Object searchTerm_ = "";
    /**
     * <pre>
     * Output only. The search term.
     * </pre>
     *
     * <code>optional string search_term = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @return Whether the searchTerm field is set.
     */
    public boolean hasSearchTerm() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <pre>
     * Output only. The search term.
     * </pre>
     *
     * <code>optional string search_term = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @return The searchTerm.
     */
    public java.lang.String getSearchTerm() {
      java.lang.Object ref = searchTerm_;
      if (!(ref instanceof java.lang.String)) {
        // Decode the cached ByteString once and keep the String from then on.
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        searchTerm_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Output only. The search term.
     * </pre>
     *
     * <code>optional string search_term = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @return The bytes for searchTerm.
     */
    public com.google.protobuf.ByteString
        getSearchTermBytes() {
      java.lang.Object ref = searchTerm_;
      if (ref instanceof String) {
        // Encode once and cache the ByteString representation in place.
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        searchTerm_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Output only. The search term.
     * </pre>
     *
     * <code>optional string search_term = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @param value The searchTerm to set.
     * @return This builder for chaining.
     */
    public Builder setSearchTerm(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      searchTerm_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Output only. The search term.
     * </pre>
     *
     * <code>optional string search_term = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @return This builder for chaining.
     */
    public Builder clearSearchTerm() {
      searchTerm_ = getDefaultInstance().getSearchTerm();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Output only. The search term.
     * </pre>
     *
     * <code>optional string search_term = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @param value The bytes for searchTerm to set.
     * @return This builder for chaining.
     */
    public Builder setSearchTermBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      // proto3 strings must be valid UTF-8; reject invalid bytes eagerly.
      checkByteStringIsUtf8(value);
      searchTerm_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // campaign (field 3, proto3 optional): same lazy String/ByteString
    // representation; presence bit is 0x00000004.
    private java.lang.Object campaign_ = "";
    /**
     * <pre>
     * Output only. The campaign the search term served in.
     * </pre>
     *
     * <code>optional string campaign = 3 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @return Whether the campaign field is set.
     */
    public boolean hasCampaign() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <pre>
     * Output only. The campaign the search term served in.
     * </pre>
     *
     * <code>optional string campaign = 3 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @return The campaign.
     */
    public java.lang.String getCampaign() {
      java.lang.Object ref = campaign_;
      if (!(ref instanceof java.lang.String)) {
        // Decode the cached ByteString once and keep the String from then on.
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        campaign_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Output only. The campaign the search term served in.
     * </pre>
     *
     * <code>optional string campaign = 3 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @return The bytes for campaign.
     */
    public com.google.protobuf.ByteString
        getCampaignBytes() {
      java.lang.Object ref = campaign_;
      if (ref instanceof String) {
        // Encode once and cache the ByteString representation in place.
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        campaign_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Output only. The campaign the search term served in.
     * </pre>
     *
     * <code>optional string campaign = 3 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @param value The campaign to set.
     * @return This builder for chaining.
     */
    public Builder setCampaign(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      campaign_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Output only. The campaign the search term served in.
     * </pre>
     *
     * <code>optional string campaign = 3 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @return This builder for chaining.
     */
    public Builder clearCampaign() {
      campaign_ = getDefaultInstance().getCampaign();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Output only. The campaign the search term served in.
     * </pre>
     *
     * <code>optional string campaign = 3 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @param value The bytes for campaign to set.
     * @return This builder for chaining.
     */
    public Builder setCampaignBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      // proto3 strings must be valid UTF-8; reject invalid bytes eagerly.
      checkByteStringIsUtf8(value);
      campaign_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    // Unknown-field handling is delegated unchanged to the generated superclass;
    // these finals exist only to narrow the return type to this Builder.
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.resources.CampaignSearchTermView)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v21.resources.CampaignSearchTermView)
  // Singleton default instance shared by all callers; created eagerly in the
  // static initializer and returned by getDefaultInstance().
  private static final com.google.ads.googleads.v21.resources.CampaignSearchTermView DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v21.resources.CampaignSearchTermView();
  }
  public static com.google.ads.googleads.v21.resources.CampaignSearchTermView getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser: funnels every parse through Builder.mergeFrom and
  // attaches the partially-built message to any parse exception so callers
  // can inspect what was decoded before the failure.
  private static final com.google.protobuf.Parser<CampaignSearchTermView>
      PARSER = new com.google.protobuf.AbstractParser<CampaignSearchTermView>() {
    @java.lang.Override
    public CampaignSearchTermView parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        // Wrap plain I/O failures in the protobuf exception type callers expect.
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };
  public static com.google.protobuf.Parser<CampaignSearchTermView> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<CampaignSearchTermView> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.ads.googleads.v21.resources.CampaignSearchTermView getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/v2beta1/participant.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.v2beta1;
/**
*
*
* <pre>
* The response message for
* [Participants.CompileSuggestion][google.cloud.dialogflow.v2beta1.Participants.CompileSuggestion].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2beta1.CompileSuggestionResponse}
*/
@java.lang.Deprecated
public final class CompileSuggestionResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.v2beta1.CompileSuggestionResponse)
CompileSuggestionResponseOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use CompileSuggestionResponse.newBuilder() to construct.
  private CompileSuggestionResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default-instance constructor: initializes string fields to "" so getters
  // never return null.
  private CompileSuggestionResponse() {
    latestMessage_ = "";
  }

  // Reflection hook used by the protobuf runtime to allocate fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new CompileSuggestionResponse();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.dialogflow.v2beta1.ParticipantProto
        .internal_static_google_cloud_dialogflow_v2beta1_CompileSuggestionResponse_descriptor;
  }

  // Binds the descriptor to this class/builder pair for reflective field access.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.dialogflow.v2beta1.ParticipantProto
        .internal_static_google_cloud_dialogflow_v2beta1_CompileSuggestionResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.dialogflow.v2beta1.CompileSuggestionResponse.class,
            com.google.cloud.dialogflow.v2beta1.CompileSuggestionResponse.Builder.class);
  }
  // Presence bits for singular message fields (bit 0x00000001 = suggestion).
  private int bitField0_;
  public static final int SUGGESTION_FIELD_NUMBER = 1;
  private com.google.cloud.dialogflow.v2beta1.Suggestion suggestion_;

  /**
   *
   *
   * <pre>
   * The compiled suggestion.
   * </pre>
   *
   * <code>.google.cloud.dialogflow.v2beta1.Suggestion suggestion = 1;</code>
   *
   * @return Whether the suggestion field is set.
   */
  @java.lang.Override
  public boolean hasSuggestion() {
    return ((bitField0_ & 0x00000001) != 0);
  }

  /**
   *
   *
   * <pre>
   * The compiled suggestion.
   * </pre>
   *
   * <code>.google.cloud.dialogflow.v2beta1.Suggestion suggestion = 1;</code>
   *
   * @return The suggestion.
   */
  @java.lang.Override
  public com.google.cloud.dialogflow.v2beta1.Suggestion getSuggestion() {
    // Never returns null: falls back to the default instance when unset.
    return suggestion_ == null
        ? com.google.cloud.dialogflow.v2beta1.Suggestion.getDefaultInstance()
        : suggestion_;
  }

  /**
   *
   *
   * <pre>
   * The compiled suggestion.
   * </pre>
   *
   * <code>.google.cloud.dialogflow.v2beta1.Suggestion suggestion = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.dialogflow.v2beta1.SuggestionOrBuilder getSuggestionOrBuilder() {
    return suggestion_ == null
        ? com.google.cloud.dialogflow.v2beta1.Suggestion.getDefaultInstance()
        : suggestion_;
  }

  public static final int LATEST_MESSAGE_FIELD_NUMBER = 2;

  // Holds either a String or its cached UTF-8 ByteString (lazy dual form).
  @SuppressWarnings("serial")
  private volatile java.lang.Object latestMessage_ = "";

  /**
   *
   *
   * <pre>
   * The name of the latest conversation message used to compile
   * suggestion for.
   *
   * Format: `projects/<Project ID>/locations/<Location
   * ID>/conversations/<Conversation ID>/messages/<Message ID>`.
   * </pre>
   *
   * <code>string latest_message = 2;</code>
   *
   * @return The latestMessage.
   */
  @java.lang.Override
  public java.lang.String getLatestMessage() {
    java.lang.Object ref = latestMessage_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the cached ByteString once and keep the String from then on.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      latestMessage_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * The name of the latest conversation message used to compile
   * suggestion for.
   *
   * Format: `projects/<Project ID>/locations/<Location
   * ID>/conversations/<Conversation ID>/messages/<Message ID>`.
   * </pre>
   *
   * <code>string latest_message = 2;</code>
   *
   * @return The bytes for latestMessage.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getLatestMessageBytes() {
    java.lang.Object ref = latestMessage_;
    if (ref instanceof java.lang.String) {
      // Encode once and cache the ByteString representation in place.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      latestMessage_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int CONTEXT_SIZE_FIELD_NUMBER = 3;
  private int contextSize_ = 0;

  /**
   *
   *
   * <pre>
   * Number of messages prior to and including
   * [latest_message][google.cloud.dialogflow.v2beta1.CompileSuggestionResponse.latest_message]
   * to compile the suggestion. It may be smaller than the
   * [CompileSuggestionRequest.context_size][google.cloud.dialogflow.v2beta1.CompileSuggestionRequest.context_size]
   * field in the request if there aren't that many messages in the
   * conversation.
   * </pre>
   *
   * <code>int32 context_size = 3;</code>
   *
   * @return The contextSize.
   */
  @java.lang.Override
  public int getContextSize() {
    return contextSize_;
  }
  // Memoized initialization state: -1 = unknown, 0 = not initialized, 1 = ok.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes only fields that differ from their defaults (proto3 semantics),
  // then appends any unknown fields preserved from parsing.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getSuggestion());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(latestMessage_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, latestMessage_);
    }
    if (contextSize_ != 0) {
      output.writeInt32(3, contextSize_);
    }
    getUnknownFields().writeTo(output);
  }

  // Computes the serialized byte size once and caches it in memoizedSize.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getSuggestion());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(latestMessage_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, latestMessage_);
    }
    if (contextSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, contextSize_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-by-field structural equality, including unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.dialogflow.v2beta1.CompileSuggestionResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.dialogflow.v2beta1.CompileSuggestionResponse other =
        (com.google.cloud.dialogflow.v2beta1.CompileSuggestionResponse) obj;

    if (hasSuggestion() != other.hasSuggestion()) return false;
    if (hasSuggestion()) {
      if (!getSuggestion().equals(other.getSuggestion())) return false;
    }
    if (!getLatestMessage().equals(other.getLatestMessage())) return false;
    if (getContextSize() != other.getContextSize()) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Hash mixes descriptor, each set field (tagged by field number), and
  // unknown fields; result is memoized since the message is immutable.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasSuggestion()) {
      hash = (37 * hash) + SUGGESTION_FIELD_NUMBER;
      hash = (53 * hash) + getSuggestion().hashCode();
    }
    hash = (37 * hash) + LATEST_MESSAGE_FIELD_NUMBER;
    hash = (53 * hash) + getLatestMessage().hashCode();
    hash = (37 * hash) + CONTEXT_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getContextSize();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points: one overload per input source
  // (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with
  // and without an ExtensionRegistryLite. All delegate to PARSER.
  public static com.google.cloud.dialogflow.v2beta1.CompileSuggestionResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dialogflow.v2beta1.CompileSuggestionResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.v2beta1.CompileSuggestionResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dialogflow.v2beta1.CompileSuggestionResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.v2beta1.CompileSuggestionResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dialogflow.v2beta1.CompileSuggestionResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.v2beta1.CompileSuggestionResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dialogflow.v2beta1.CompileSuggestionResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a varint length prefix before the message body.
  public static com.google.cloud.dialogflow.v2beta1.CompileSuggestionResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.dialogflow.v2beta1.CompileSuggestionResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.v2beta1.CompileSuggestionResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dialogflow.v2beta1.CompileSuggestionResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Builder factories: newBuilder() starts from the default instance;
  // toBuilder() pre-populates from this message (cheap no-op on the default).
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.dialogflow.v2beta1.CompileSuggestionResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* The response message for
* [Participants.CompileSuggestion][google.cloud.dialogflow.v2beta1.Participants.CompileSuggestion].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2beta1.CompileSuggestionResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.v2beta1.CompileSuggestionResponse)
com.google.cloud.dialogflow.v2beta1.CompileSuggestionResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.dialogflow.v2beta1.ParticipantProto
          .internal_static_google_cloud_dialogflow_v2beta1_CompileSuggestionResponse_descriptor;
    }

    // Binds the descriptor to this class/builder pair for reflective access.
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.dialogflow.v2beta1.ParticipantProto
          .internal_static_google_cloud_dialogflow_v2beta1_CompileSuggestionResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.dialogflow.v2beta1.CompileSuggestionResponse.class,
              com.google.cloud.dialogflow.v2beta1.CompileSuggestionResponse.Builder.class);
    }

    // Construct using com.google.cloud.dialogflow.v2beta1.CompileSuggestionResponse.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    // Eagerly creates nested-message field builders when the runtime is
    // configured to always use field builders (descriptor-based runtime).
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getSuggestionFieldBuilder();
      }
    }

    // Resets every field to its default and clears all presence bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      suggestion_ = null;
      if (suggestionBuilder_ != null) {
        suggestionBuilder_.dispose();
        suggestionBuilder_ = null;
      }
      latestMessage_ = "";
      contextSize_ = 0;
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.dialogflow.v2beta1.ParticipantProto
          .internal_static_google_cloud_dialogflow_v2beta1_CompileSuggestionResponse_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.dialogflow.v2beta1.CompileSuggestionResponse
        getDefaultInstanceForType() {
      return com.google.cloud.dialogflow.v2beta1.CompileSuggestionResponse.getDefaultInstance();
    }
    // build() enforces initialization; buildPartial() copies whatever is set.
    @java.lang.Override
    public com.google.cloud.dialogflow.v2beta1.CompileSuggestionResponse build() {
      com.google.cloud.dialogflow.v2beta1.CompileSuggestionResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.dialogflow.v2beta1.CompileSuggestionResponse buildPartial() {
      com.google.cloud.dialogflow.v2beta1.CompileSuggestionResponse result =
          new com.google.cloud.dialogflow.v2beta1.CompileSuggestionResponse(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies each set field from builder state into the immutable message and
    // translates builder presence bits into the message's bitField0_.
    private void buildPartial0(
        com.google.cloud.dialogflow.v2beta1.CompileSuggestionResponse result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.suggestion_ = suggestionBuilder_ == null ? suggestion_ : suggestionBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.latestMessage_ = latestMessage_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.contextSize_ = contextSize_;
      }
      result.bitField0_ |= to_bitField0_;
    }
    // The following overrides only narrow the return type to this Builder;
    // all behavior is inherited from the generated superclass.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    // Dispatches to the typed merge when possible, otherwise falls back to
    // reflective merging in the superclass.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.dialogflow.v2beta1.CompileSuggestionResponse) {
        return mergeFrom((com.google.cloud.dialogflow.v2beta1.CompileSuggestionResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Typed merge: set fields in `other` overwrite/merge into this builder;
    // default-valued fields in `other` are left untouched.
    public Builder mergeFrom(com.google.cloud.dialogflow.v2beta1.CompileSuggestionResponse other) {
      if (other
          == com.google.cloud.dialogflow.v2beta1.CompileSuggestionResponse.getDefaultInstance())
        return this;
      if (other.hasSuggestion()) {
        mergeSuggestion(other.getSuggestion());
      }
      if (!other.getLatestMessage().isEmpty()) {
        latestMessage_ = other.latestMessage_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.getContextSize() != 0) {
        setContextSize(other.getContextSize());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format merge loop: reads tags until EOF (tag 0) or an end-group
    // marker, dispatching each known field tag to its setter and routing
    // everything else to the unknown-field set.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                // Field 1 (suggestion), wire type 2: merge into nested builder.
                input.readMessage(getSuggestionFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                // Field 2 (latest_message), wire type 2: UTF-8 validated string.
                latestMessage_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 24:
              {
                // Field 3 (context_size), wire type 0: varint int32.
                contextSize_ = input.readInt32();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parents even on failure so partially-merged state propagates.
        onChanged();
      } // finally
      return this;
    }
    // Builder-side presence bits: 0x1 suggestion, 0x2 latestMessage, 0x4 contextSize.
    private int bitField0_;

    private com.google.cloud.dialogflow.v2beta1.Suggestion suggestion_;
    // Lazily-created single-field builder; once it exists, it owns the field
    // state and suggestion_ is ignored until build time.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dialogflow.v2beta1.Suggestion,
            com.google.cloud.dialogflow.v2beta1.Suggestion.Builder,
            com.google.cloud.dialogflow.v2beta1.SuggestionOrBuilder>
        suggestionBuilder_;

    /**
     *
     *
     * <pre>
     * The compiled suggestion.
     * </pre>
     *
     * <code>.google.cloud.dialogflow.v2beta1.Suggestion suggestion = 1;</code>
     *
     * @return Whether the suggestion field is set.
     */
    public boolean hasSuggestion() {
      return ((bitField0_ & 0x00000001) != 0);
    }

    /**
     *
     *
     * <pre>
     * The compiled suggestion.
     * </pre>
     *
     * <code>.google.cloud.dialogflow.v2beta1.Suggestion suggestion = 1;</code>
     *
     * @return The suggestion.
     */
    public com.google.cloud.dialogflow.v2beta1.Suggestion getSuggestion() {
      if (suggestionBuilder_ == null) {
        return suggestion_ == null
            ? com.google.cloud.dialogflow.v2beta1.Suggestion.getDefaultInstance()
            : suggestion_;
      } else {
        return suggestionBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * The compiled suggestion.
     * </pre>
     *
     * <code>.google.cloud.dialogflow.v2beta1.Suggestion suggestion = 1;</code>
     */
    public Builder setSuggestion(com.google.cloud.dialogflow.v2beta1.Suggestion value) {
      if (suggestionBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        suggestion_ = value;
      } else {
        suggestionBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The compiled suggestion.
     * </pre>
     *
     * <code>.google.cloud.dialogflow.v2beta1.Suggestion suggestion = 1;</code>
     */
    public Builder setSuggestion(
        com.google.cloud.dialogflow.v2beta1.Suggestion.Builder builderForValue) {
      if (suggestionBuilder_ == null) {
        suggestion_ = builderForValue.build();
      } else {
        suggestionBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The compiled suggestion.
     * </pre>
     *
     * <code>.google.cloud.dialogflow.v2beta1.Suggestion suggestion = 1;</code>
     */
    public Builder mergeSuggestion(com.google.cloud.dialogflow.v2beta1.Suggestion value) {
      if (suggestionBuilder_ == null) {
        // Merge only when an existing non-default value is present; otherwise
        // a plain assignment suffices.
        if (((bitField0_ & 0x00000001) != 0)
            && suggestion_ != null
            && suggestion_ != com.google.cloud.dialogflow.v2beta1.Suggestion.getDefaultInstance()) {
          getSuggestionBuilder().mergeFrom(value);
        } else {
          suggestion_ = value;
        }
      } else {
        suggestionBuilder_.mergeFrom(value);
      }
      if (suggestion_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The compiled suggestion.
     * </pre>
     *
     * <code>.google.cloud.dialogflow.v2beta1.Suggestion suggestion = 1;</code>
     */
    public Builder clearSuggestion() {
      bitField0_ = (bitField0_ & ~0x00000001);
      suggestion_ = null;
      if (suggestionBuilder_ != null) {
        suggestionBuilder_.dispose();
        suggestionBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The compiled suggestion.
     * </pre>
     *
     * <code>.google.cloud.dialogflow.v2beta1.Suggestion suggestion = 1;</code>
     */
    public com.google.cloud.dialogflow.v2beta1.Suggestion.Builder getSuggestionBuilder() {
      // Handing out the nested builder implies the field is now "set".
      bitField0_ |= 0x00000001;
      onChanged();
      return getSuggestionFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * The compiled suggestion.
     * </pre>
     *
     * <code>.google.cloud.dialogflow.v2beta1.Suggestion suggestion = 1;</code>
     */
    public com.google.cloud.dialogflow.v2beta1.SuggestionOrBuilder getSuggestionOrBuilder() {
      if (suggestionBuilder_ != null) {
        return suggestionBuilder_.getMessageOrBuilder();
      } else {
        return suggestion_ == null
            ? com.google.cloud.dialogflow.v2beta1.Suggestion.getDefaultInstance()
            : suggestion_;
      }
    }

    /**
     *
     *
     * <pre>
     * The compiled suggestion.
     * </pre>
     *
     * <code>.google.cloud.dialogflow.v2beta1.Suggestion suggestion = 1;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dialogflow.v2beta1.Suggestion,
            com.google.cloud.dialogflow.v2beta1.Suggestion.Builder,
            com.google.cloud.dialogflow.v2beta1.SuggestionOrBuilder>
        getSuggestionFieldBuilder() {
      if (suggestionBuilder_ == null) {
        // On first use the current message value is transferred into the
        // builder and the plain field reference is dropped.
        suggestionBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.dialogflow.v2beta1.Suggestion,
                com.google.cloud.dialogflow.v2beta1.Suggestion.Builder,
                com.google.cloud.dialogflow.v2beta1.SuggestionOrBuilder>(
                getSuggestion(), getParentForChildren(), isClean());
        suggestion_ = null;
      }
      return suggestionBuilder_;
    }

    // latestMessage: lazy String/ByteString dual representation, as on the
    // message side.
    private java.lang.Object latestMessage_ = "";

    /**
     *
     *
     * <pre>
     * The name of the latest conversation message used to compile
     * suggestion for.
     *
     * Format: `projects/<Project ID>/locations/<Location
     * ID>/conversations/<Conversation ID>/messages/<Message ID>`.
     * </pre>
     *
     * <code>string latest_message = 2;</code>
     *
     * @return The latestMessage.
     */
    public java.lang.String getLatestMessage() {
      java.lang.Object ref = latestMessage_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        latestMessage_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * The name of the latest conversation message used to compile
     * suggestion for.
     *
     * Format: `projects/<Project ID>/locations/<Location
     * ID>/conversations/<Conversation ID>/messages/<Message ID>`.
     * </pre>
     *
     * <code>string latest_message = 2;</code>
     *
     * @return The bytes for latestMessage.
     */
    public com.google.protobuf.ByteString getLatestMessageBytes() {
      java.lang.Object ref = latestMessage_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        latestMessage_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * The name of the latest conversation message used to compile
     * suggestion for.
     *
     * Format: `projects/<Project ID>/locations/<Location
     * ID>/conversations/<Conversation ID>/messages/<Message ID>`.
     * </pre>
     *
     * <code>string latest_message = 2;</code>
     *
     * @param value The latestMessage to set.
     * @return This builder for chaining.
     */
    public Builder setLatestMessage(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      latestMessage_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The name of the latest conversation message used to compile
     * suggestion for.
     *
     * Format: `projects/<Project ID>/locations/<Location
     * ID>/conversations/<Conversation ID>/messages/<Message ID>`.
     * </pre>
     *
     * <code>string latest_message = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearLatestMessage() {
      latestMessage_ = getDefaultInstance().getLatestMessage();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The name of the latest conversation message used to compile
     * suggestion for.
     *
     * Format: `projects/<Project ID>/locations/<Location
     * ID>/conversations/<Conversation ID>/messages/<Message ID>`.
     * </pre>
     *
     * <code>string latest_message = 2;</code>
     *
     * @param value The bytes for latestMessage to set.
     * @return This builder for chaining.
     */
    public Builder setLatestMessageBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // proto3 strings must be valid UTF-8; reject invalid bytes eagerly.
      checkByteStringIsUtf8(value);
      latestMessage_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
private int contextSize_;
/**
 *
 *
 * <pre>
 * Number of messages prior to and including
 * [latest_message][google.cloud.dialogflow.v2beta1.CompileSuggestionResponse.latest_message]
 * to compile the suggestion. It may be smaller than the
 * [CompileSuggestionRequest.context_size][google.cloud.dialogflow.v2beta1.CompileSuggestionRequest.context_size]
 * field in the request if there aren't that many messages in the
 * conversation.
 * </pre>
 *
 * <code>int32 context_size = 3;</code>
 *
 * @return The contextSize.
 */
@java.lang.Override
public int getContextSize() {
  return contextSize_;
}
/**
 *
 *
 * <pre>
 * Number of messages prior to and including
 * [latest_message][google.cloud.dialogflow.v2beta1.CompileSuggestionResponse.latest_message]
 * to compile the suggestion. It may be smaller than the
 * [CompileSuggestionRequest.context_size][google.cloud.dialogflow.v2beta1.CompileSuggestionRequest.context_size]
 * field in the request if there aren't that many messages in the
 * conversation.
 * </pre>
 *
 * <code>int32 context_size = 3;</code>
 *
 * @param value The contextSize to set.
 * @return This builder for chaining.
 */
public Builder setContextSize(int value) {
  contextSize_ = value;
  // 0x00000004 is this field's has-bit within bitField0_.
  bitField0_ |= 0x00000004;
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Number of messages prior to and including
 * [latest_message][google.cloud.dialogflow.v2beta1.CompileSuggestionResponse.latest_message]
 * to compile the suggestion. It may be smaller than the
 * [CompileSuggestionRequest.context_size][google.cloud.dialogflow.v2beta1.CompileSuggestionRequest.context_size]
 * field in the request if there aren't that many messages in the
 * conversation.
 * </pre>
 *
 * <code>int32 context_size = 3;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearContextSize() {
  // Clear the has-bit and restore the proto3 default (0).
  bitField0_ = (bitField0_ & ~0x00000004);
  contextSize_ = 0;
  onChanged();
  return this;
}
// Final overrides that simply delegate to the superclass; generated so the
// builder's fluent return type is this concrete Builder.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.v2beta1.CompileSuggestionResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.dialogflow.v2beta1.CompileSuggestionResponse)
// Singleton default instance; all unset message fields resolve to this.
private static final com.google.cloud.dialogflow.v2beta1.CompileSuggestionResponse
    DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.cloud.dialogflow.v2beta1.CompileSuggestionResponse();
}
public static com.google.cloud.dialogflow.v2beta1.CompileSuggestionResponse getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Wire-format parser. On any failure the partially built message is attached
// to the thrown InvalidProtocolBufferException so callers can inspect it.
private static final com.google.protobuf.Parser<CompileSuggestionResponse> PARSER =
    new com.google.protobuf.AbstractParser<CompileSuggestionResponse>() {
      @java.lang.Override
      public CompileSuggestionResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failures so the API surface is a single exception type.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
public static com.google.protobuf.Parser<CompileSuggestionResponse> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CompileSuggestionResponse> getParserForType() {
  return PARSER;
}
@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.CompileSuggestionResponse getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: schema/google/showcase/v1beta1/echo.proto
// Protobuf Java Version: 3.25.8
package com.google.showcase.v1beta1;
/**
*
*
* <pre>
* The response for the PagedExpand method.
* </pre>
*
* Protobuf type {@code google.showcase.v1beta1.PagedExpandResponse}
*/
public final class PagedExpandResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.showcase.v1beta1.PagedExpandResponse)
PagedExpandResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use PagedExpandResponse.newBuilder() to construct.
private PagedExpandResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// No-arg constructor used only for the default instance; initializes fields
// to their proto3 defaults (empty list / empty string).
private PagedExpandResponse() {
  responses_ = java.util.Collections.emptyList();
  nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new PagedExpandResponse();
}
// Reflection support: descriptor and field-accessor table generated from
// schema/google/showcase/v1beta1/echo.proto.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.showcase.v1beta1.EchoOuterClass
      .internal_static_google_showcase_v1beta1_PagedExpandResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.showcase.v1beta1.EchoOuterClass
      .internal_static_google_showcase_v1beta1_PagedExpandResponse_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.showcase.v1beta1.PagedExpandResponse.class,
          com.google.showcase.v1beta1.PagedExpandResponse.Builder.class);
}
public static final int RESPONSES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.showcase.v1beta1.EchoResponse> responses_;
/**
 *
 *
 * <pre>
 * The words that were expanded.
 * </pre>
 *
 * <code>repeated .google.showcase.v1beta1.EchoResponse responses = 1;</code>
 */
@java.lang.Override
public java.util.List<com.google.showcase.v1beta1.EchoResponse> getResponsesList() {
  return responses_;
}
/**
 *
 *
 * <pre>
 * The words that were expanded.
 * </pre>
 *
 * <code>repeated .google.showcase.v1beta1.EchoResponse responses = 1;</code>
 */
@java.lang.Override
public java.util.List<? extends com.google.showcase.v1beta1.EchoResponseOrBuilder>
    getResponsesOrBuilderList() {
  return responses_;
}
/**
 *
 *
 * <pre>
 * The words that were expanded.
 * </pre>
 *
 * <code>repeated .google.showcase.v1beta1.EchoResponse responses = 1;</code>
 */
@java.lang.Override
public int getResponsesCount() {
  return responses_.size();
}
/**
 *
 *
 * <pre>
 * The words that were expanded.
 * </pre>
 *
 * <code>repeated .google.showcase.v1beta1.EchoResponse responses = 1;</code>
 *
 * @param index zero-based position within the repeated field.
 */
@java.lang.Override
public com.google.showcase.v1beta1.EchoResponse getResponses(int index) {
  return responses_.get(index);
}
/**
 *
 *
 * <pre>
 * The words that were expanded.
 * </pre>
 *
 * <code>repeated .google.showcase.v1beta1.EchoResponse responses = 1;</code>
 *
 * @param index zero-based position within the repeated field.
 */
@java.lang.Override
public com.google.showcase.v1beta1.EchoResponseOrBuilder getResponsesOrBuilder(int index) {
  return responses_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
// Stored either as a String or a ByteString; converted lazily on access and
// the decoded form is cached back into the field (hence volatile).
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
 *
 *
 * <pre>
 * The next page token.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The nextPageToken.
 */
@java.lang.Override
public java.lang.String getNextPageToken() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Decode the cached bytes once and keep the String for later calls.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    nextPageToken_ = s;
    return s;
  }
}
/**
 *
 *
 * <pre>
 * The next page token.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The bytes for nextPageToken.
 */
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof java.lang.String) {
    // Encode the cached String once and keep the ByteString for later calls.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    nextPageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// Memoized tri-state: -1 = not computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // This message has no required fields, so it is always initialized.
  memoizedIsInitialized = 1;
  return true;
}
// Serializes this message to the wire: repeated field 1, then field 2 only
// when non-empty (proto3 default values are omitted), then unknown fields.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  for (int i = 0; i < responses_.size(); i++) {
    output.writeMessage(1, responses_.get(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
  }
  getUnknownFields().writeTo(output);
}
// Computes (and memoizes) the serialized byte size; must mirror writeTo's
// field-emission logic exactly.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  for (int i = 0; i < responses_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, responses_.get(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
// Field-by-field structural equality, including unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.showcase.v1beta1.PagedExpandResponse)) {
    return super.equals(obj);
  }
  com.google.showcase.v1beta1.PagedExpandResponse other =
      (com.google.showcase.v1beta1.PagedExpandResponse) obj;
  if (!getResponsesList().equals(other.getResponsesList())) return false;
  if (!getNextPageToken().equals(other.getNextPageToken())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
// Memoized hash consistent with equals; folds in the descriptor, each set
// field (keyed by field number), and unknown fields.
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (getResponsesCount() > 0) {
    hash = (37 * hash) + RESPONSES_FIELD_NUMBER;
    hash = (53 * hash) + getResponsesList().hashCode();
  }
  hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
  hash = (53 * hash) + getNextPageToken().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Static parse entry points, one per input source (ByteBuffer, ByteString,
// byte[], InputStream, CodedInputStream), each with and without an extension
// registry; all delegate to PARSER / GeneratedMessageV3 helpers.
public static com.google.showcase.v1beta1.PagedExpandResponse parseFrom(java.nio.ByteBuffer data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.showcase.v1beta1.PagedExpandResponse parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.showcase.v1beta1.PagedExpandResponse parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.showcase.v1beta1.PagedExpandResponse parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.showcase.v1beta1.PagedExpandResponse parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.showcase.v1beta1.PagedExpandResponse parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.showcase.v1beta1.PagedExpandResponse parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.showcase.v1beta1.PagedExpandResponse parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message body.
public static com.google.showcase.v1beta1.PagedExpandResponse parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.showcase.v1beta1.PagedExpandResponse parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}
public static com.google.showcase.v1beta1.PagedExpandResponse parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.showcase.v1beta1.PagedExpandResponse parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// Builder factory methods: fresh builders come from the default instance;
// toBuilder() on the default instance returns an empty Builder (no merge).
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.showcase.v1beta1.PagedExpandResponse prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* The response for the PagedExpand method.
* </pre>
*
* Protobuf type {@code google.showcase.v1beta1.PagedExpandResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.showcase.v1beta1.PagedExpandResponse)
com.google.showcase.v1beta1.PagedExpandResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.showcase.v1beta1.EchoOuterClass
.internal_static_google_showcase_v1beta1_PagedExpandResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.showcase.v1beta1.EchoOuterClass
.internal_static_google_showcase_v1beta1_PagedExpandResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.showcase.v1beta1.PagedExpandResponse.class,
com.google.showcase.v1beta1.PagedExpandResponse.Builder.class);
}
// Construct using com.google.showcase.v1beta1.PagedExpandResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (responsesBuilder_ == null) {
responses_ = java.util.Collections.emptyList();
} else {
responses_ = null;
responsesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.showcase.v1beta1.EchoOuterClass
.internal_static_google_showcase_v1beta1_PagedExpandResponse_descriptor;
}
@java.lang.Override
public com.google.showcase.v1beta1.PagedExpandResponse getDefaultInstanceForType() {
return com.google.showcase.v1beta1.PagedExpandResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.showcase.v1beta1.PagedExpandResponse build() {
com.google.showcase.v1beta1.PagedExpandResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.showcase.v1beta1.PagedExpandResponse buildPartial() {
com.google.showcase.v1beta1.PagedExpandResponse result =
new com.google.showcase.v1beta1.PagedExpandResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.showcase.v1beta1.PagedExpandResponse result) {
if (responsesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
responses_ = java.util.Collections.unmodifiableList(responses_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.responses_ = responses_;
} else {
result.responses_ = responsesBuilder_.build();
}
}
private void buildPartial0(com.google.showcase.v1beta1.PagedExpandResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.showcase.v1beta1.PagedExpandResponse) {
return mergeFrom((com.google.showcase.v1beta1.PagedExpandResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.showcase.v1beta1.PagedExpandResponse other) {
if (other == com.google.showcase.v1beta1.PagedExpandResponse.getDefaultInstance())
return this;
if (responsesBuilder_ == null) {
if (!other.responses_.isEmpty()) {
if (responses_.isEmpty()) {
responses_ = other.responses_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureResponsesIsMutable();
responses_.addAll(other.responses_);
}
onChanged();
}
} else {
if (!other.responses_.isEmpty()) {
if (responsesBuilder_.isEmpty()) {
responsesBuilder_.dispose();
responsesBuilder_ = null;
responses_ = other.responses_;
bitField0_ = (bitField0_ & ~0x00000001);
responsesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getResponsesFieldBuilder()
: null;
} else {
responsesBuilder_.addAllMessages(other.responses_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.showcase.v1beta1.EchoResponse m =
input.readMessage(
com.google.showcase.v1beta1.EchoResponse.parser(), extensionRegistry);
if (responsesBuilder_ == null) {
ensureResponsesIsMutable();
responses_.add(m);
} else {
responsesBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.showcase.v1beta1.EchoResponse> responses_ =
java.util.Collections.emptyList();
private void ensureResponsesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
responses_ = new java.util.ArrayList<com.google.showcase.v1beta1.EchoResponse>(responses_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.showcase.v1beta1.EchoResponse,
com.google.showcase.v1beta1.EchoResponse.Builder,
com.google.showcase.v1beta1.EchoResponseOrBuilder>
responsesBuilder_;
/**
*
*
* <pre>
* The words that were expanded.
* </pre>
*
* <code>repeated .google.showcase.v1beta1.EchoResponse responses = 1;</code>
*/
public java.util.List<com.google.showcase.v1beta1.EchoResponse> getResponsesList() {
if (responsesBuilder_ == null) {
return java.util.Collections.unmodifiableList(responses_);
} else {
return responsesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The words that were expanded.
* </pre>
*
* <code>repeated .google.showcase.v1beta1.EchoResponse responses = 1;</code>
*/
public int getResponsesCount() {
if (responsesBuilder_ == null) {
return responses_.size();
} else {
return responsesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The words that were expanded.
* </pre>
*
* <code>repeated .google.showcase.v1beta1.EchoResponse responses = 1;</code>
*/
public com.google.showcase.v1beta1.EchoResponse getResponses(int index) {
if (responsesBuilder_ == null) {
return responses_.get(index);
} else {
return responsesBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The words that were expanded.
* </pre>
*
* <code>repeated .google.showcase.v1beta1.EchoResponse responses = 1;</code>
*/
public Builder setResponses(int index, com.google.showcase.v1beta1.EchoResponse value) {
if (responsesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureResponsesIsMutable();
responses_.set(index, value);
onChanged();
} else {
responsesBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The words that were expanded.
* </pre>
*
* <code>repeated .google.showcase.v1beta1.EchoResponse responses = 1;</code>
*/
public Builder setResponses(
int index, com.google.showcase.v1beta1.EchoResponse.Builder builderForValue) {
if (responsesBuilder_ == null) {
ensureResponsesIsMutable();
responses_.set(index, builderForValue.build());
onChanged();
} else {
responsesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The words that were expanded.
* </pre>
*
* <code>repeated .google.showcase.v1beta1.EchoResponse responses = 1;</code>
*/
public Builder addResponses(com.google.showcase.v1beta1.EchoResponse value) {
if (responsesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureResponsesIsMutable();
responses_.add(value);
onChanged();
} else {
responsesBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The words that were expanded.
* </pre>
*
* <code>repeated .google.showcase.v1beta1.EchoResponse responses = 1;</code>
*/
public Builder addResponses(int index, com.google.showcase.v1beta1.EchoResponse value) {
if (responsesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureResponsesIsMutable();
responses_.add(index, value);
onChanged();
} else {
responsesBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The words that were expanded.
* </pre>
*
* <code>repeated .google.showcase.v1beta1.EchoResponse responses = 1;</code>
*/
public Builder addResponses(com.google.showcase.v1beta1.EchoResponse.Builder builderForValue) {
if (responsesBuilder_ == null) {
ensureResponsesIsMutable();
responses_.add(builderForValue.build());
onChanged();
} else {
responsesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The words that were expanded.
* </pre>
*
* <code>repeated .google.showcase.v1beta1.EchoResponse responses = 1;</code>
*/
public Builder addResponses(
int index, com.google.showcase.v1beta1.EchoResponse.Builder builderForValue) {
if (responsesBuilder_ == null) {
ensureResponsesIsMutable();
responses_.add(index, builderForValue.build());
onChanged();
} else {
responsesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The words that were expanded.
* </pre>
*
* <code>repeated .google.showcase.v1beta1.EchoResponse responses = 1;</code>
*/
public Builder addAllResponses(
java.lang.Iterable<? extends com.google.showcase.v1beta1.EchoResponse> values) {
if (responsesBuilder_ == null) {
ensureResponsesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, responses_);
onChanged();
} else {
responsesBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The words that were expanded.
* </pre>
*
* <code>repeated .google.showcase.v1beta1.EchoResponse responses = 1;</code>
*/
public Builder clearResponses() {
if (responsesBuilder_ == null) {
responses_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
responsesBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The words that were expanded.
* </pre>
*
* <code>repeated .google.showcase.v1beta1.EchoResponse responses = 1;</code>
*/
public Builder removeResponses(int index) {
if (responsesBuilder_ == null) {
ensureResponsesIsMutable();
responses_.remove(index);
onChanged();
} else {
responsesBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The words that were expanded.
* </pre>
*
* <code>repeated .google.showcase.v1beta1.EchoResponse responses = 1;</code>
*/
public com.google.showcase.v1beta1.EchoResponse.Builder getResponsesBuilder(int index) {
return getResponsesFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The words that were expanded.
* </pre>
*
* <code>repeated .google.showcase.v1beta1.EchoResponse responses = 1;</code>
*/
public com.google.showcase.v1beta1.EchoResponseOrBuilder getResponsesOrBuilder(int index) {
if (responsesBuilder_ == null) {
return responses_.get(index);
} else {
return responsesBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The words that were expanded.
* </pre>
*
* <code>repeated .google.showcase.v1beta1.EchoResponse responses = 1;</code>
*/
public java.util.List<? extends com.google.showcase.v1beta1.EchoResponseOrBuilder>
getResponsesOrBuilderList() {
if (responsesBuilder_ != null) {
return responsesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(responses_);
}
}
/**
*
*
* <pre>
* The words that were expanded.
* </pre>
*
* <code>repeated .google.showcase.v1beta1.EchoResponse responses = 1;</code>
*/
public com.google.showcase.v1beta1.EchoResponse.Builder addResponsesBuilder() {
return getResponsesFieldBuilder()
.addBuilder(com.google.showcase.v1beta1.EchoResponse.getDefaultInstance());
}
/**
*
*
* <pre>
* The words that were expanded.
* </pre>
*
* <code>repeated .google.showcase.v1beta1.EchoResponse responses = 1;</code>
*/
public com.google.showcase.v1beta1.EchoResponse.Builder addResponsesBuilder(int index) {
return getResponsesFieldBuilder()
.addBuilder(index, com.google.showcase.v1beta1.EchoResponse.getDefaultInstance());
}
/**
*
*
* <pre>
* The words that were expanded.
* </pre>
*
* <code>repeated .google.showcase.v1beta1.EchoResponse responses = 1;</code>
*/
public java.util.List<com.google.showcase.v1beta1.EchoResponse.Builder>
getResponsesBuilderList() {
return getResponsesFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.showcase.v1beta1.EchoResponse,
com.google.showcase.v1beta1.EchoResponse.Builder,
com.google.showcase.v1beta1.EchoResponseOrBuilder>
getResponsesFieldBuilder() {
if (responsesBuilder_ == null) {
responsesBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.showcase.v1beta1.EchoResponse,
com.google.showcase.v1beta1.EchoResponse.Builder,
com.google.showcase.v1beta1.EchoResponseOrBuilder>(
responses_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
responses_ = null;
}
return responsesBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* The next page token.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 *
 *
 * <pre>
 * The next page token.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The bytes for nextPageToken.
 */
public com.google.protobuf.ByteString getNextPageTokenBytes() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    // Cache the encoded ByteString so later reads skip the UTF-8 encoding.
    nextPageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
 *
 *
 * <pre>
 * The next page token.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @param value The nextPageToken to set.
 * @return This builder for chaining.
 */
public Builder setNextPageToken(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  nextPageToken_ = value;
  // Bit 0x2 of bitField0_ records that next_page_token was set on this builder.
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * The next page token.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearNextPageToken() {
  // Restore the default value and clear the has-bit (0x2) for this field.
  nextPageToken_ = getDefaultInstance().getNextPageToken();
  bitField0_ = (bitField0_ & ~0x00000002);
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * The next page token.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @param value The bytes for nextPageToken to set.
 * @return This builder for chaining.
 */
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // proto3 string fields must contain valid UTF-8; rejects invalid byte sequences.
  checkByteStringIsUtf8(value);
  nextPageToken_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
// Delegates to the generated superclass implementation (kept for API completeness).
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}
// Delegates to the generated superclass implementation (kept for API completeness).
@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.showcase.v1beta1.PagedExpandResponse)
}
// @@protoc_insertion_point(class_scope:google.showcase.v1beta1.PagedExpandResponse)
private static final com.google.showcase.v1beta1.PagedExpandResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.showcase.v1beta1.PagedExpandResponse();
}
// Returns the singleton default (all-fields-default) instance of this message.
public static com.google.showcase.v1beta1.PagedExpandResponse getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Message parser: delegates to Builder.mergeFrom and converts every failure mode into
// an InvalidProtocolBufferException carrying the partially-built message, so callers
// can inspect what was parsed before the error.
private static final com.google.protobuf.Parser<PagedExpandResponse> PARSER =
    new com.google.protobuf.AbstractParser<PagedExpandResponse>() {
      @java.lang.Override
      public PagedExpandResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
// Static accessor for the shared parser instance.
public static com.google.protobuf.Parser<PagedExpandResponse> parser() {
  return PARSER;
}
// Instance-level accessor required by the Message interface; returns the shared parser.
@java.lang.Override
public com.google.protobuf.Parser<PagedExpandResponse> getParserForType() {
  return PARSER;
}
// Instance-level accessor required by the Message interface; returns the singleton default.
@java.lang.Override
public com.google.showcase.v1beta1.PagedExpandResponse getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
oracle/graal | 35,051 | compiler/src/jdk.graal.compiler/src/jdk/graal/compiler/lir/alloc/lsra/LinearScan.java | /*
* Copyright (c) 2009, 2025, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package jdk.graal.compiler.lir.alloc.lsra;
import static jdk.vm.ci.code.CodeUtil.isEven;
import static jdk.vm.ci.code.ValueUtil.asRegister;
import static jdk.vm.ci.code.ValueUtil.isIllegal;
import static jdk.vm.ci.code.ValueUtil.isLegal;
import static jdk.vm.ci.code.ValueUtil.isRegister;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.graalvm.collections.Pair;
import jdk.graal.compiler.core.common.LIRKind;
import jdk.graal.compiler.core.common.NumUtil;
import jdk.graal.compiler.core.common.alloc.RegisterAllocationConfig;
import jdk.graal.compiler.core.common.cfg.BasicBlock;
import jdk.graal.compiler.core.common.cfg.BlockMap;
import jdk.graal.compiler.debug.Assertions;
import jdk.graal.compiler.debug.CounterKey;
import jdk.graal.compiler.debug.DebugCloseable;
import jdk.graal.compiler.debug.DebugContext;
import jdk.graal.compiler.debug.GraalError;
import jdk.graal.compiler.debug.Indent;
import jdk.graal.compiler.lir.LIR;
import jdk.graal.compiler.lir.LIRInstruction;
import jdk.graal.compiler.lir.LIRValueUtil;
import jdk.graal.compiler.lir.Variable;
import jdk.graal.compiler.lir.VirtualStackSlot;
import jdk.graal.compiler.lir.framemap.FrameMapBuilder;
import jdk.graal.compiler.lir.gen.LIRGenerationResult;
import jdk.graal.compiler.lir.gen.MoveFactory;
import jdk.graal.compiler.lir.phases.AllocationPhase;
import jdk.graal.compiler.lir.phases.LIRPhase;
import jdk.graal.compiler.options.NestedBooleanOptionKey;
import jdk.graal.compiler.options.Option;
import jdk.graal.compiler.options.OptionKey;
import jdk.graal.compiler.options.OptionType;
import jdk.graal.compiler.options.OptionValues;
import jdk.vm.ci.code.Register;
import jdk.vm.ci.code.RegisterAttributes;
import jdk.vm.ci.code.RegisterValue;
import jdk.vm.ci.code.TargetDescription;
import jdk.vm.ci.meta.AllocatableValue;
import jdk.vm.ci.meta.Value;
/**
* An implementation of the linear scan register allocator algorithm described in
* <a href="http://doi.acm.org/10.1145/1064979.1064998" > "Optimized Interval Splitting in a Linear
* Scan Register Allocator"</a> by Christian Wimmer and Hanspeter Moessenboeck.
*/
public class LinearScan {
/** True only when both JVM assertions and detailed-assertion mode are enabled. */
protected boolean isDetailedAsserts() {
    if (!Assertions.assertionsEnabled()) {
        return false;
    }
    return detailedAsserts;
}
/**
* These timers can significantly affect the speed of linear scan so they are disabled by
* default.
*/
static final boolean DETAILED_TIMERS = false;
/** Returns a counter key for {@code name}, or {@code null} when detailed timers are off. */
static CounterKey counter(String name) {
    return DETAILED_TIMERS ? DebugContext.counter(name) : null;
}
/** Returns a counting-timer key for {@code name}, or {@code null} when detailed timers are off. */
static DebugContext.CountingTimerKey countingTimer(String name) {
    return DETAILED_TIMERS ? DebugContext.countingTimer(name) : null;
}
/** Starts {@code key} if it exists; returns {@code null} for a disabled (null) key. */
DebugCloseable start(DebugContext.CountingTimerKey key) {
    return key == null ? null : key.start(debug);
}
/** Increments {@code key} if it exists; a disabled (null) key is a no-op. */
void increment(CounterKey key) {
    if (key == null) {
        return;
    }
    key.increment(debug);
}
/** Option keys controlling linear-scan behavior. */
public static class Options {
    // @formatter:off
    @Option(help = "Enable spill position optimization", type = OptionType.Debug)
    public static final OptionKey<Boolean> LIROptLSRAOptimizeSpillPosition = new NestedBooleanOptionKey(LIRPhase.Options.LIROptimization, true);
    @Option(help = "Use binary search if interval is longer than this limit", type = OptionType.Debug)
    public static final OptionKey<Integer> IntervalBinarySearchLimit = new OptionKey<>(100);
    // @formatter:on
}
/**
 * Per-basic-block liveness state computed by lifetime analysis. All bit sets are indexed by
 * {@linkplain LinearScan#operandNumber(Value) operand number}.
 */
public static class BlockData {
    /**
     * Bit map specifying which operands are live upon entry to this block. These are values
     * used in this block or any of its successors where such value are not defined in this
     * block. The bit index of an operand is its {@linkplain LinearScan#operandNumber(Value)
     * operand number}.
     */
    public SparseBitSet liveIn;
    /**
     * Bit map specifying which operands are live upon exit from this block. These are values
     * used in a successor block that are either defined in this block or were live upon entry
     * to this block. The bit index of an operand is its
     * {@linkplain LinearScan#operandNumber(Value) operand number}.
     */
    public SparseBitSet liveOut;
    /**
     * Bit map specifying which operands are used (before being defined) in this block. That is,
     * these are the values that are live upon entry to the block. The bit index of an operand
     * is its {@linkplain LinearScan#operandNumber(Value) operand number}.
     */
    public SparseBitSet liveGen;
    /**
     * Bit map specifying which operands are defined/overwritten in this block. The bit index of
     * an operand is its {@linkplain LinearScan#operandNumber(Value) operand number}.
     */
    public SparseBitSet liveKill;
    /**
     * State used during {@link LinearScanLifetimeAnalysisPhase#computeGlobalLiveSets()} to
     * create a worklist.
     */
    boolean dirty = true;
}
public static final int DOMINATOR_SPILL_MOVE_ID = -2;
private static final int SPLIT_INTERVALS_CAPACITY_RIGHT_SHIFT = 1;
/**
* Maximum number of unsorted intervals we consider "almost sorted" and cheap to sort in-place
* with insertion sort, i.e. not worth affording a worst case O(n log(n)) sorting algorithm.
*/
private static final int ALMOST_SORTED_THRESHOLD = 64;
private final LIR ir;
private final FrameMapBuilder frameMapBuilder;
private final List<RegisterAttributes> registerAttributes;
private final List<Register> registers;
private final RegisterAllocationConfig regAllocConfig;
private final MoveFactory moveFactory;
private final BlockMap<BlockData> blockData;
protected final DebugContext debug;
/**
* List of blocks in linear-scan order. This is only correct as long as the CFG does not change.
*/
private final int[] sortedBlocks;
/**
* @see #intervals()
*/
private Interval[] intervals;
/**
* The number of valid entries in {@link #intervals}.
*/
private int intervalsSize;
/**
* The index of the first entry in {@link #intervals} for a
* {@linkplain #createDerivedInterval(Interval) derived interval}.
*/
private int firstDerivedIntervalIndex = -1;
/**
* Intervals sorted by {@link Interval#from()}.
*/
private Interval[] sortedIntervals;
/**
* Map from an instruction {@linkplain LIRInstruction#id() id} to the instruction. Entries
* should be retrieved with {@link #instructionForId(int)} as the id is not simply an index into
* this array.
*/
private LIRInstruction[] opIdToInstructionMap;
/**
* Map from an instruction {@linkplain LIRInstruction#id() id} to the {@linkplain BasicBlock
* block} containing the instruction. Entries should be retrieved with {@link #blockForId(int)}
* as the id is not simply an index into this array.
*/
private BasicBlock<?>[] opIdToBlockMap;
/**
* The {@linkplain #operandNumber(Value) number} of the first variable operand allocated.
*/
private final int firstVariableNumber;
/**
* Number of variables.
*/
private int numVariables;
private final boolean neverSpillConstants;
/**
* Sentinel interval to denote the end of an interval list.
*/
protected final Interval intervalEndMarker;
private final boolean detailedAsserts;
private final LIRGenerationResult res;
public final int intervalBinarySearchLimit;
/**
 * Creates a linear scan register allocator for the LIR of {@code res}.
 *
 * @param target target description supplying the architecture's register set
 * @param res LIR generation result whose virtual registers are to be allocated
 * @param spillMoveFactory factory used to create spill and reload moves
 * @param regAllocConfig register configuration and per-register allocatability attributes
 * @param sortedBlocks block ids in linear-scan order
 * @param neverSpillConstants if {@code true}, constants are rematerialized instead of spilled
 */
@SuppressWarnings("this-escape")
protected LinearScan(TargetDescription target, LIRGenerationResult res, MoveFactory spillMoveFactory, RegisterAllocationConfig regAllocConfig, int[] sortedBlocks,
                boolean neverSpillConstants) {
    this.ir = res.getLIR();
    this.res = res;
    this.debug = ir.getDebug();
    this.moveFactory = spillMoveFactory;
    this.frameMapBuilder = res.getFrameMapBuilder();
    this.sortedBlocks = sortedBlocks;
    this.registerAttributes = regAllocConfig.getRegisterConfig().getAttributesMap();
    this.regAllocConfig = regAllocConfig;
    this.registers = target.arch.getRegisters();
    // Operand numbers [0, #registers) denote registers; variable numbers follow.
    this.firstVariableNumber = getRegisters().size();
    this.numVariables = ir.numVariables();
    this.blockData = new BlockMap<>(ir.getControlFlowGraph());
    this.neverSpillConstants = neverSpillConstants;
    // Sentinel whose next pointer refers to itself, so list iteration cannot run past it.
    this.intervalEndMarker = new Interval(Value.ILLEGAL, Interval.END_MARKER_OPERAND_NUMBER, null);
    this.intervalEndMarker.next = intervalEndMarker;
    this.detailedAsserts = Assertions.detailedAssertionsEnabled(ir.getOptions());
    this.intervalBinarySearchLimit = Options.IntervalBinarySearchLimit.getValue(ir.getOptions());
}
/**
* Compute the variable number of the given operand.
*
* @param operand
* @return the variable number of the supplied operand or {@code -1} if the supplied operand
* describes a register
*/
public int getVariableNumber(int operand) {
    // Variables are numbered after all registers, so anything below
    // firstVariableNumber denotes a register rather than a variable.
    return operand >= firstVariableNumber ? operand - firstVariableNumber : -1;
}
public LIRGenerationResult getLIRGenerationResult() {
return res;
}
public OptionValues getOptions() {
return ir.getOptions();
}
public DebugContext getDebug() {
return debug;
}
/** Returns the id of the first LIR instruction in {@code block}. */
public int getFirstLirInstructionId(BasicBlock<?> block) {
    ArrayList<LIRInstruction> instructions = ir.getLIRforBlock(block);
    int firstId = instructions.getFirst().id();
    assert NumUtil.assertNonNegativeInt(firstId);
    return firstId;
}
/** Returns the id of the last LIR instruction in {@code block}. */
public int getLastLirInstructionId(BasicBlock<?> block) {
    int lastId = ir.getLIRforBlock(block).getLast().id();
    assert NumUtil.assertNonNegativeInt(lastId);
    return lastId;
}
public MoveFactory getSpillMoveFactory() {
return moveFactory;
}
protected MoveResolver createMoveResolver() {
MoveResolver moveResolver = new MoveResolver(this);
assert moveResolver.checkEmpty();
return moveResolver;
}
/** True if {@code value} is either a {@link Variable} or a register. */
public static boolean isVariableOrRegister(Value value) {
    if (LIRValueUtil.isVariable(value)) {
        return true;
    }
    return isRegister(value);
}
/**
* Converts an operand (variable or register) to an index in a flat address space covering all
* the {@linkplain Variable variables} and {@linkplain RegisterValue registers} being processed
* by this allocator.
*/
int operandNumber(Value op) {
    // Strip any value-cast wrapper so the underlying register/variable is numbered.
    Value operand = LIRValueUtil.stripCast(op);
    if (isRegister(operand)) {
        int number = asRegister(operand).number;
        assert number < firstVariableNumber : number + " " + firstVariableNumber;
        return number;
    }
    assert LIRValueUtil.isVariable(operand) : operand;
    // Variables are numbered after all registers.
    return firstVariableNumber + LIRValueUtil.asVariable(operand).index;
}
/**
* Gets the number of operands. This value will increase by 1 for new variable.
*/
int operandSize() {
return firstVariableNumber + numVariables;
}
/**
* Gets the highest operand number for a register operand. This value will never change.
*/
int maxRegisterNumber() {
return firstVariableNumber - 1;
}
public BlockData getBlockData(BasicBlock<?> block) {
return blockData.get(block);
}
void initBlockData(BasicBlock<?> block) {
blockData.put(block, new BlockData());
}
static final IntervalPredicate IS_PRECOLORED_INTERVAL = new IntervalPredicate() {
@Override
public boolean apply(Interval i) {
return isRegister(i.operand);
}
};
static final IntervalPredicate IS_VARIABLE_INTERVAL = new IntervalPredicate() {
@Override
public boolean apply(Interval i) {
return LIRValueUtil.isVariable(i.operand);
}
};
/**
* Gets an object describing the attributes of a given register according to this register
* configuration.
*/
public RegisterAttributes attributes(Register reg) {
return registerAttributes.get(reg.number);
}
/**
 * Assigns {@code interval} its spill location: {@link Value#ILLEGAL} if the value can be
 * rematerialized, otherwise the interval's existing canonical spill slot, otherwise a freshly
 * allocated slot which also becomes the canonical one.
 */
void assignSpillSlot(Interval interval) {
    /*
     * Assign the canonical spill slot of the parent (if a part of the interval is already
     * spilled) or allocate a new spill slot.
     */
    if (interval.canMaterialize()) {
        interval.assignLocation(Value.ILLEGAL);
    } else if (interval.spillSlot() != null) {
        interval.assignLocation(interval.spillSlot());
    } else {
        VirtualStackSlot slot = frameMapBuilder.allocateSpillSlot(interval.kind());
        interval.setSpillSlot(slot);
        interval.assignLocation(slot);
    }
}
/**
* Map from {@linkplain #operandNumber(Value) operand numbers} to intervals.
*/
public Interval[] intervals() {
return intervals;
}
void initIntervals() {
intervalsSize = operandSize();
intervals = new Interval[intervalsSize + (intervalsSize >> SPLIT_INTERVALS_CAPACITY_RIGHT_SHIFT)];
}
/**
* Creates a new interval.
*
* @param operand the operand for the interval
* @return the created interval
*/
Interval createInterval(AllocatableValue operand) {
    assert isLegal(operand);
    int operandNumber = operandNumber(operand);
    Interval interval = new Interval(operand, operandNumber, intervalEndMarker);
    assert operandNumber < intervalsSize : operandNumber + " " + intervalsSize;
    // Each operand number owns exactly one slot in the intervals array.
    assert intervals[operandNumber] == null;
    intervals[operandNumber] = interval;
    return interval;
}
/**
* Creates an interval as a result of splitting or spilling another interval.
*
* @param source an interval being split of spilled
* @return a new interval derived from {@code source}
*/
Interval createDerivedInterval(Interval source) {
    // Remember where the derived intervals start; liveSetSize() excludes them.
    if (firstDerivedIntervalIndex == -1) {
        firstDerivedIntervalIndex = intervalsSize;
    }
    if (intervalsSize == intervals.length) {
        // Grow geometrically (by ~50%, at least 1) when the array is full.
        intervals = Arrays.copyOf(intervals, intervals.length + (intervals.length >> SPLIT_INTERVALS_CAPACITY_RIGHT_SHIFT) + 1);
    }
    intervalsSize++;
    assert intervalsSize <= intervals.length : intervalsSize + " " + intervals.length;
    /*
     * Note that these variables are not managed and must therefore never be inserted into the
     * LIR
     */
    Variable variable = new Variable(source.kind(), numVariables++);
    Interval interval = createInterval(variable);
    assert intervals[intervalsSize - 1] == interval : intervals[intervalsSize - 1] + " " + interval;
    return interval;
}
// access to block list (sorted in linear scan order)
public int blockCount() {
return sortedBlocks.length;
}
public BasicBlock<?> blockAt(int index) {
return ir.getBlockById(sortedBlocks[index]);
}
/**
* Gets the size of the {@link BlockData#liveIn} and {@link BlockData#liveOut} sets for a basic
* block. These sets do not include any operands allocated as a result of creating
* {@linkplain #createDerivedInterval(Interval) derived intervals}.
*/
public int liveSetSize() {
return firstDerivedIntervalIndex == -1 ? operandSize() : firstDerivedIntervalIndex;
}
int numLoops() {
return ir.getControlFlowGraph().getNumberOfLoops();
}
Interval intervalFor(int operandNumber) {
return intervals[operandNumber];
}
public Interval intervalFor(Value operand) {
int operandNumber = operandNumber(operand);
assert operandNumber < intervalsSize : operandNumber + " " + intervalsSize;
return intervals[operandNumber];
}
/** Returns the interval for {@code operand}, creating and registering one if absent. */
public Interval getOrCreateInterval(AllocatableValue operand) {
    Interval existing = intervalFor(operand);
    return existing != null ? existing : createInterval(operand);
}
void initOpIdMaps(int numInstructions) {
opIdToInstructionMap = new LIRInstruction[numInstructions];
opIdToBlockMap = new BasicBlock<?>[numInstructions];
}
void putOpIdMaps(int index, LIRInstruction op, BasicBlock<?> block) {
opIdToInstructionMap[index] = op;
opIdToBlockMap[index] = block;
}
/**
* Gets the highest instruction id allocated by this object.
*/
int maxOpId() {
    assert opIdToInstructionMap.length > 0 : "no operations";
    // Instruction ids are allocated in steps of 2, so the highest id is twice the last index.
    return (opIdToInstructionMap.length - 1) << 1;
}
/**
* Converts an {@linkplain LIRInstruction#id() instruction id} to an instruction index. All LIR
* instructions in a method have an index one greater than their linear-scan order predecessor
* with the first instruction having an index of 0.
*/
private static int opIdToIndex(int opId) {
    // Ids are even and increase in steps of 2; halving yields a dense array index.
    return opId >> 1;
}
/**
* Retrieves the {@link LIRInstruction} based on its {@linkplain LIRInstruction#id() id}.
*
* @param opId an instruction {@linkplain LIRInstruction#id() id}
* @return the instruction whose {@linkplain LIRInstruction#id()} {@code == id}
*/
public LIRInstruction instructionForId(int opId) {
    // Only even ids denote real instructions (odd ids fall between them).
    assert isEven(opId) : "opId not even";
    LIRInstruction instr = opIdToInstructionMap[opIdToIndex(opId)];
    assert instr.id() == opId : Assertions.errorMessage(instr, opId);
    return instr;
}
/**
* Gets the block containing a given instruction.
*
* @param opId an instruction {@linkplain LIRInstruction#id() id}
* @return the block containing the instruction denoted by {@code opId}
*/
public BasicBlock<?> blockForId(int opId) {
assert opIdToBlockMap.length > 0 && opId >= 0 && opId <= maxOpId() + 1 : "opId out of range";
return opIdToBlockMap[opIdToIndex(opId)];
}
/** True if {@code opId} is the id of the first instruction of its block. */
boolean isBlockBegin(int opId) {
    if (opId == 0) {
        return true;
    }
    return blockForId(opId) != blockForId(opId - 1);
}
/**
* Determines if an {@link LIRInstruction} destroys all caller saved registers.
*
* @param opId an instruction {@linkplain LIRInstruction#id() id}
* @return {@code true} if the instruction denoted by {@code id} destroys all caller saved
* registers.
*/
boolean hasCall(int opId) {
    assert isEven(opId) : "opId not even";
    // "Call" here means the instruction destroys all caller-saved registers.
    return instructionForId(opId).destroysCallerSavedRegisters();
}
/** Predicate over intervals, used to partition interval lists. */
abstract static class IntervalPredicate {
    abstract boolean apply(Interval i);
}
/** True unless {@code operand} is a register excluded from allocation. */
public boolean isProcessed(Value operand) {
    if (isRegister(operand)) {
        return attributes(asRegister(operand)).isAllocatable();
    }
    return true;
}
// * Phase 5: actual register allocation
/** Assertion helper: checks {@code intervals} is null-free and nondecreasing by start position. */
private static boolean isSorted(Interval[] intervals) {
    int previousFrom = -1;
    for (int i = 0; i < intervals.length; i++) {
        Interval current = intervals[i];
        assert current != null;
        assert previousFrom <= current.from() : previousFrom + " " + current.from();
        previousFrom = current.from();
    }
    return true;
}
/**
 * Appends {@code interval} after {@code prev} and returns the (possibly new) list head:
 * when {@code prev} is {@code null} the appended interval becomes the head.
 */
static Interval addToList(Interval first, Interval prev, Interval interval) {
    if (prev == null) {
        return interval;
    }
    prev.next = interval;
    return first;
}
/**
 * Partitions {@link #sortedIntervals} into two linked lists, preserving sorted order. An
 * interval goes to list 1 if {@code isList1} accepts it; otherwise to list 2 if
 * {@code isList2} is {@code null} or accepts it; intervals rejected by both are dropped.
 * Both lists are terminated by {@link #intervalEndMarker}.
 */
Pair<Interval, Interval> createUnhandledLists(IntervalPredicate isList1, IntervalPredicate isList2) {
    assert isSorted(sortedIntervals) : "interval list is not sorted";
    Interval list1 = intervalEndMarker;
    Interval list2 = intervalEndMarker;
    Interval list1Prev = null;
    Interval list2Prev = null;
    Interval v;
    int n = sortedIntervals.length;
    for (int i = 0; i < n; i++) {
        v = sortedIntervals[i];
        // null entries can appear in the sorted array; skip them.
        if (v == null) {
            continue;
        }
        if (isList1.apply(v)) {
            list1 = addToList(list1, list1Prev, v);
            list1Prev = v;
        } else if (isList2 == null || isList2.apply(v)) {
            list2 = addToList(list2, list2Prev, v);
            list2Prev = v;
        }
    }
    // Terminate both lists with the sentinel so traversal stops deterministically.
    if (list1Prev != null) {
        list1Prev.next = intervalEndMarker;
    }
    if (list2Prev != null) {
        list2Prev.next = intervalEndMarker;
    }
    assert list1Prev == null || list1Prev.next.isEndMarker() : "linear list ends not with sentinel";
    assert list2Prev == null || list2Prev.next.isEndMarker() : "linear list ends not with sentinel";
    return Pair.create(list1, list2);
}
/**
 * Sorts {@code intervals} ascending by {@link Interval#from()}.
 *
 * Uses {@link Integer#compare} instead of the subtraction idiom
 * {@code a.from() - b.from()}: subtraction can overflow for extreme values, yielding an
 * inconsistent comparator and an {@code IllegalArgumentException}
 * ("Comparison method violates its general contract!") from {@link Arrays#sort}.
 */
private static void sortIntervals(Interval[] intervals) {
    Arrays.sort(intervals, (Interval a, Interval b) -> Integer.compare(a.from(), b.from()));
}
/**
 * Builds {@link #sortedIntervals} from {@link #intervals}: compacts out {@code null} slots and
 * sorts by {@link Interval#from()}, choosing insertion sort when the input is nearly sorted
 * and a full comparison sort otherwise.
 */
protected void sortIntervalsBeforeAllocation() {
    int sortedLen = 0;
    int notSorted = 0;
    int sortedFromMax = -1;
    // First pass: count non-null intervals and how many are out of order with
    // respect to the running maximum start position ("inversions").
    for (Interval interval : intervals) {
        if (interval != null) {
            sortedLen++;
            int from = interval.from();
            if (sortedFromMax <= from) {
                sortedFromMax = interval.from();
            } else {
                notSorted++;
            }
        }
    }
    Interval[] sortedList = new Interval[sortedLen];
    if (notSorted > 0 && notSorted <= ALMOST_SORTED_THRESHOLD) {
        // almost sorted, use simple in-place sorting algorithm
        sortIntervalsAlmostSorted(intervals, sortedList);
    } else {
        // already sorted, or a potentially high number of swaps needed
        int sortedIdx = 0;
        for (Interval interval : intervals) {
            if (interval != null) {
                sortedList[sortedIdx++] = interval;
            }
        }
        if (notSorted > 0) {
            sortIntervals(sortedList);
        }
    }
    sortedIntervals = sortedList;
}
/**
* Sorts intervals using insertion sort (O(n) best case, O(n^2) worse case complexity).
*/
// Compacts the non-null entries of `intervals` into `sortedList` (which the caller sizes to
// exactly the non-null count) while sorting by from() via insertion sort.
private static void sortIntervalsAlmostSorted(Interval[] intervals, Interval[] sortedList) {
    int sortedIdx = 0;
    int sortedFromMax = -1;
    // special sorting algorithm: the original interval-list is almost sorted,
    // only some intervals are swapped. So this is much faster than a complete QuickSort
    for (Interval interval : intervals) {
        if (interval != null) {
            int from = interval.from();
            if (sortedFromMax <= from) {
                sortedList[sortedIdx++] = interval;
                sortedFromMax = interval.from();
            } else {
                // the assumption that the intervals are already sorted failed,
                // so this interval must be sorted in manually
                // (shift larger elements right, then drop the interval into the gap)
                int j;
                for (j = sortedIdx - 1; j >= 0 && from < sortedList[j].from(); j--) {
                    sortedList[j + 1] = sortedList[j];
                }
                sortedList[j + 1] = interval;
                sortedIdx++;
            }
        }
    }
}
/**
 * Re-establishes the sorted-interval invariant after allocation may have appended derived
 * (split) intervals: sorts only the new tail of {@link #intervals} and merges it with the
 * already-sorted {@link #sortedIntervals}.
 */
void sortIntervalsAfterAllocation() {
    if (firstDerivedIntervalIndex == -1) {
        // no intervals have been added during allocation, so sorted list is already up to date
        return;
    }
    Interval[] oldList = sortedIntervals;
    Interval[] newList = Arrays.copyOfRange(intervals, firstDerivedIntervalIndex, intervalsSize);
    int oldLen = oldList.length;
    int newLen = newList.length;
    // conventional sort-algorithm for new intervals
    sortIntervals(newList);
    // merge old and new list (both already sorted) into one combined list
    Interval[] combinedList = new Interval[oldLen + newLen];
    int oldIdx = 0;
    int newIdx = 0;
    // Stable two-way merge: on ties the old list's element comes first.
    while (oldIdx + newIdx < combinedList.length) {
        if (newIdx >= newLen || (oldIdx < oldLen && oldList[oldIdx].from() <= newList[newIdx].from())) {
            combinedList[oldIdx + newIdx] = oldList[oldIdx];
            oldIdx++;
        } else {
            combinedList[oldIdx + newIdx] = newList[newIdx];
            newIdx++;
        }
    }
    sortedIntervals = combinedList;
}
// wrapper for Interval.splitChildAtOpId that performs a bailout in product mode
// instead of returning null
/**
 * Returns the split child of {@code interval} covering {@code opId} in the given operand
 * mode. Unlike {@code Interval.getSplitChildAtOpId}, a missing child is a hard error here
 * ({@link GraalError}) rather than a {@code null} return.
 */
public Interval splitChildAtOpId(Interval interval, int opId, LIRInstruction.OperandMode mode) {
    Interval result = interval.getSplitChildAtOpId(opId, mode, this);
    if (result != null) {
        if (debug.isLogEnabled()) {
            debug.log("Split child at pos %d of interval %s is %s", opId, interval, result);
        }
        return result;
    }
    throw new GraalError("LinearScan: interval is null");
}
/** Returns the interval's canonical spill slot, which must already be assigned. */
static AllocatableValue canonicalSpillOpr(Interval interval) {
    AllocatableValue slot = interval.spillSlot();
    assert slot != null : "canonical spill slot not set";
    return slot;
}
/**
 * True if {@code operand} at instruction {@code opId} has no assigned location and can be
 * rematerialized from a constant instead of being loaded from a spill slot.
 */
boolean isMaterialized(AllocatableValue operand, int opId, LIRInstruction.OperandMode mode) {
    Interval interval = intervalFor(operand);
    assert interval != null : "interval must exist";
    if (opId != -1) {
        /*
         * Operands are not changed when an interval is split during allocation, so search the
         * right interval here.
         */
        interval = splitChildAtOpId(interval, opId, mode);
    }
    return isIllegal(interval.location()) && interval.canMaterialize();
}
boolean isCallerSave(Value operand) {
return attributes(asRegister(operand)).isCallerSave();
}
/**
 * Driver for the complete allocation pipeline: lifetime analysis, interval sorting, register
 * allocation, optional spill-position optimization, data-flow resolution, spill-move
 * elimination and final location assignment, with verification under detailed asserts.
 */
@SuppressWarnings("try")
protected void allocate(TargetDescription target, LIRGenerationResult lirGenRes, AllocationPhase.AllocationContext context) {
    /*
     * This is the point to enable debug logging for the whole register allocation.
     */
    try (Indent indent = debug.logAndIndent("LinearScan allocate")) {
        createLifetimeAnalysisPhase().apply(target, lirGenRes, context);
        try (DebugContext.Scope s = debug.scope("AfterLifetimeAnalysis", (Object) intervals)) {
            sortIntervalsBeforeAllocation();
            createRegisterAllocationPhase().apply(target, lirGenRes, context);
            if (LinearScan.Options.LIROptLSRAOptimizeSpillPosition.getValue(getOptions())) {
                createOptimizeSpillPositionPhase().apply(target, lirGenRes, context);
            }
            createResolveDataFlowPhase().apply(target, lirGenRes, context);
            // Allocation may have created split intervals; restore the sorted invariant.
            sortIntervalsAfterAllocation();
            if (isDetailedAsserts()) {
                verify();
            }
            beforeSpillMoveElimination();
            createSpillMoveEliminationPhase().apply(target, lirGenRes, context);
            createAssignLocationsPhase().apply(target, lirGenRes, context);
            if (isDetailedAsserts()) {
                verifyIntervals();
            }
        } catch (Throwable e) {
            throw debug.handle(e);
        }
    }
}
protected void beforeSpillMoveElimination() {
}
protected LinearScanLifetimeAnalysisPhase createLifetimeAnalysisPhase() {
return new LinearScanLifetimeAnalysisPhase(this);
}
protected LinearScanRegisterAllocationPhase createRegisterAllocationPhase() {
return new LinearScanRegisterAllocationPhase(this);
}
protected LinearScanOptimizeSpillPositionPhase createOptimizeSpillPositionPhase() {
return new LinearScanOptimizeSpillPositionPhase(this);
}
protected LinearScanResolveDataFlowPhase createResolveDataFlowPhase() {
return new LinearScanResolveDataFlowPhase(this);
}
protected LinearScanEliminateSpillMovePhase createSpillMoveEliminationPhase() {
return new LinearScanEliminateSpillMovePhase(this);
}
protected LinearScanAssignLocationsPhase createAssignLocationsPhase() {
return new LinearScanAssignLocationsPhase(this);
}
@SuppressWarnings("try")
public void printIntervals(String label) {
if (debug.isLogEnabled()) {
try (Indent indent = debug.logAndIndent("intervals %s", label)) {
for (Interval interval : intervals) {
if (interval != null) {
debug.log("%s", interval.logString(this));
}
}
try (Indent indent2 = debug.logAndIndent("Basic Blocks")) {
for (int i = 0; i < blockCount(); i++) {
BasicBlock<?> block = blockAt(i);
debug.log("B%d [%d, %d, %s] ", block.getId(), getFirstLirInstructionId(block), getLastLirInstructionId(block), block.getLoop());
}
}
}
}
debug.dump(DebugContext.INFO_LEVEL, new LinearScanIntervalDumper(Arrays.copyOf(intervals, intervalsSize)), label);
}
/**
 * Runs interval and register verification; always returns {@code true} so it can be used
 * inside an {@code assert}. Failures throw rather than returning {@code false}.
 */
boolean verify() {
    // (check that all intervals have a correct register and that no registers are overwritten)
    verifyIntervals();
    verifyRegisters();
    debug.log("no errors found");
    return true;
}
@SuppressWarnings("try")
private void verifyRegisters() {
// Enable this logging to get output for the verification process.
try (Indent indent = debug.logAndIndent("verifying register allocation")) {
RegisterVerifier verifier = new RegisterVerifier(this);
verifier.verify(blockAt(0));
}
}
@SuppressWarnings("try")
protected void verifyIntervals() {
try (Indent indent = debug.logAndIndent("verifying intervals")) {
int len = intervalsSize;
for (int i = 0; i < len; i++) {
Interval i1 = intervals[i];
if (i1 == null) {
continue;
}
i1.checkSplitChildren();
if (i1.operandNumber != i) {
debug.log("Interval %d is on position %d in list", i1.operandNumber, i);
debug.log(i1.logString(this));
throw new GraalError("");
}
if (LIRValueUtil.isVariable(i1.operand) && i1.kind().equals(LIRKind.Illegal)) {
debug.log("Interval %d has no type assigned", i1.operandNumber);
debug.log(i1.logString(this));
throw new GraalError("");
}
if (i1.location() == null) {
debug.log("Interval %d has no register assigned", i1.operandNumber);
debug.log(i1.logString(this));
throw new GraalError("");
}
if (i1.isEmpty()) {
debug.log("Interval %d has no Range", i1.operandNumber);
debug.log(i1.logString(this));
throw new GraalError("");
}
for (Interval.RangeIterator r = new Interval.RangeIterator(i1); !r.isAtEnd(); r.next()) {
if (r.from() >= r.to()) {
debug.log("Interval %d has zero length range", i1.operandNumber);
debug.log(i1.logString(this));
throw new GraalError("");
}
}
for (int j = i + 1; j < len; j++) {
Interval i2 = intervals[j];
if (i2 == null) {
continue;
}
// special intervals that are created in MoveResolver
// . ignore them because the range information has no meaning there
if (i1.from() == 1 && i1.to() == 2) {
continue;
}
if (i2.from() == 1 && i2.to() == 2) {
continue;
}
Value l1 = i1.location();
Value l2 = i2.location();
if (i1.intersects(i2) && !isIllegal(l1) && (l1.equals(l2))) {
throw GraalError.shouldNotReachHere(String.format("Intervals %d and %d overlap and have the same register assigned\n%s\n%s", i1.operandNumber, i2.operandNumber,
i1.logString(this), i2.logString(this))); // ExcludeFromJacocoGeneratedReport
}
}
}
}
}
public LIR getLIR() {
return ir;
}
public FrameMapBuilder getFrameMapBuilder() {
return frameMapBuilder;
}
public int[] sortedBlocks() {
return sortedBlocks;
}
public List<Register> getRegisters() {
return registers;
}
public RegisterAllocationConfig getRegisterAllocationConfig() {
return regAllocConfig;
}
    /**
     * Returns true if a call kills every allocatable register, i.e. all
     * allocatable registers in the register config are caller-saved.
     */
    public boolean callKillsRegisters() {
        return regAllocConfig.getRegisterConfig().areAllAllocatableRegistersCallerSaved();
    }
    /** Returns the {@code neverSpillConstants} flag for this allocation. */
    boolean neverSpillConstants() {
        return neverSpillConstants;
    }
}
|
apache/rya | 32,328 | extras/indexingExample/src/main/java/RyaDirectExample.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.net.UnknownHostException;
import java.nio.charset.StandardCharsets;
import java.util.List;
import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.client.AccumuloSecurityException;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.TableExistsException;
import org.apache.accumulo.core.client.TableNotFoundException;
import org.apache.accumulo.core.client.mock.MockInstance;
import org.apache.accumulo.core.client.security.tokens.PasswordToken;
import org.apache.commons.lang.Validate;
import org.apache.hadoop.conf.Configuration;
import org.apache.log4j.Logger;
import org.apache.rya.accumulo.AccumuloRdfConfiguration;
import org.apache.rya.api.persist.RyaDAOException;
import org.apache.rya.indexing.accumulo.AccumuloIndexingConfiguration;
import org.apache.rya.indexing.accumulo.ConfigUtils;
import org.apache.rya.indexing.external.PrecomputedJoinIndexerConfig;
import org.apache.rya.indexing.external.PrecomputedJoinIndexerConfig.PrecomputedJoinStorageType;
import org.apache.rya.indexing.pcj.storage.PcjException;
import org.apache.rya.indexing.pcj.storage.accumulo.PcjTables;
import org.apache.rya.rdftriplestore.inference.InferenceEngineException;
import org.apache.rya.sail.config.RyaSailFactory;
import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.model.ValueFactory;
import org.eclipse.rdf4j.model.impl.SimpleValueFactory;
import org.eclipse.rdf4j.model.vocabulary.RDF;
import org.eclipse.rdf4j.model.vocabulary.RDFS;
import org.eclipse.rdf4j.query.BindingSet;
import org.eclipse.rdf4j.query.MalformedQueryException;
import org.eclipse.rdf4j.query.QueryEvaluationException;
import org.eclipse.rdf4j.query.QueryLanguage;
import org.eclipse.rdf4j.query.QueryResultHandlerException;
import org.eclipse.rdf4j.query.TupleQuery;
import org.eclipse.rdf4j.query.TupleQueryResultHandler;
import org.eclipse.rdf4j.query.TupleQueryResultHandlerException;
import org.eclipse.rdf4j.query.Update;
import org.eclipse.rdf4j.query.UpdateExecutionException;
import org.eclipse.rdf4j.repository.RepositoryException;
import org.eclipse.rdf4j.repository.sail.SailRepository;
import org.eclipse.rdf4j.repository.sail.SailRepositoryConnection;
import org.eclipse.rdf4j.sail.Sail;
import org.eclipse.rdf4j.sail.SailException;
import com.google.common.base.Optional;
public class RyaDirectExample {
private static final Logger log = Logger.getLogger(RyaDirectExample.class);
//
// Connection configuration parameters
//
private static final boolean USE_MOCK_INSTANCE = true;
private static final boolean PRINT_QUERIES = true;
private static final String INSTANCE = "instance";
private static final String RYA_TABLE_PREFIX = "x_test_triplestore_";
private static final String AUTHS = "U";
	/**
	 * Runs the indexing examples end to end: builds the (mock) Accumulo-backed
	 * configuration, pre-creates the PCJ tables, opens a Rya Sail repository,
	 * and exercises the add/delete, PCJ, temporal and free-text examples.
	 * The repository and connection are always closed in the finally block.
	 */
	public static void main(final String[] args) throws Exception {
		final Configuration conf = getConf();
		// Store precomputed joins (PCJs) directly in Accumulo tables.
		conf.set(PrecomputedJoinIndexerConfig.PCJ_STORAGE_TYPE, PrecomputedJoinStorageType.ACCUMULO.name());
		conf.setBoolean(ConfigUtils.DISPLAY_QUERY_PLAN, PRINT_QUERIES);
		log.info("Creating the tables as root.");
		SailRepository repository = null;
		SailRepositoryConnection conn = null;
		try {
			log.info("Creating PCJ Tables");
			// Pre-populate the two PCJ index tables queried by testPCJSearch below.
			createPCJ(conf);
			log.info("Connecting to Indexing Sail Repository.");
			final Sail extSail = RyaSailFactory.getInstance(conf);
			repository = new SailRepository(extSail);
			conn = repository.getConnection();
			final long start = System.currentTimeMillis();
			log.info("Running SPARQL Example: Add and Delete");
			testAddAndDelete(conn);
			log.info("Running SAIL/SPARQL Example: PCJ Search");
			testPCJSearch(conn);
			log.info("Running SAIL/SPARQL Example: Add and Temporal Search");
			testAddAndTemporalSearchWithPCJ(conn);
			log.info("Running SAIL/SPARQL Example: Add and Free Text Search with PCJ");
			testAddAndFreeTextSearchWithPCJ(conn);
			// log.info("Running SPARQL Example: Add Point and Geo Search with PCJ");
			//// testAddPointAndWithinSearchWithPCJ(conn);
			// log.info("Running SPARQL Example: Temporal, Freetext, and Geo Search");
			// testTemporalFreeGeoSearch(conn);
			// log.info("Running SPARQL Example: Geo, Freetext, and PCJ Search");
			// testGeoFreetextWithPCJSearch(conn);
			log.info("Running SPARQL Example: Delete Temporal Data");
			testDeleteTemporalData(conn);
			log.info("Running SPARQL Example: Delete Free Text Data");
			testDeleteFreeTextData(conn);
			// log.info("Running SPARQL Example: Delete Geo Data");
			// testDeleteGeoData(conn);
			// Elapsed wall-clock time, in seconds.
			log.info("TIME: " + (System.currentTimeMillis() - start) / 1000.);
		} finally {
			log.info("Shutting down");
			closeQuietly(conn);
			closeQuietly(repository);
		}
	}
private static void closeQuietly(final SailRepository repository) {
if (repository != null) {
try {
repository.shutDown();
} catch (final RepositoryException e) {
// quietly absorb this exception
}
}
}
private static void closeQuietly(final SailRepositoryConnection conn) {
if (conn != null) {
try {
conn.close();
} catch (final RepositoryException e) {
// quietly absorb this exception
}
}
}
	/**
	 * Builds the base Rya/Accumulo configuration: mock instance, root user with
	 * an empty password, the example table prefix, and the PCJ, free-text and
	 * temporal indexers enabled.
	 */
	private static Configuration getConf() {
		return AccumuloIndexingConfiguration.builder()
			.setUseMockAccumulo(USE_MOCK_INSTANCE)
			.setAuths(AUTHS)
			.setAccumuloUser("root")
			.setAccumuloPassword("")
			.setAccumuloInstance(INSTANCE)
			.setRyaPrefix(RYA_TABLE_PREFIX)
			.setUsePcj(true)
			.setUseAccumuloFreetextIndex(true)
			.setUseAccumuloTemporalIndex(true)
			.build();
	}
	/**
	 * Inserts two statements about Mike into a named graph, verifies both are
	 * returned by a query, then deletes them and verifies the count drops to 0.
	 *
	 * @param conn open repository connection used for the updates and queries
	 */
	public static void testAddAndDelete(final SailRepositoryConnection conn)
			throws MalformedQueryException, RepositoryException,
			UpdateExecutionException, QueryEvaluationException,
			TupleQueryResultHandlerException, AccumuloException,
			AccumuloSecurityException, TableNotFoundException {
		// Add data
		String query = "INSERT DATA\n"//
				+ "{ GRAPH <http://updated/test> {\n"//
				+ " <http://acme.com/people/Mike> " //
				+ " <http://acme.com/actions/likes> \"A new book\" ;\n"//
				+ " <http://acme.com/actions/likes> \"Avocados\" .\n"
				+ "} }";
		log.info("Performing Query");
		Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
		update.execute();
		// Both inserted statements should now be visible.
		query = "select ?p ?o { GRAPH <http://updated/test> {<http://acme.com/people/Mike> ?p ?o . }}";
		final CountingResultHandler resultHandler = new CountingResultHandler();
		TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL,
				query);
		tupleQuery.evaluate(resultHandler);
		log.info("Result count : " + resultHandler.getCount());
		Validate.isTrue(resultHandler.getCount() == 2);
		resultHandler.resetCount();
		// Delete Data
		query = "DELETE DATA\n" //
				+ "{ GRAPH <http://updated/test> {\n"
				+ " <http://acme.com/people/Mike> <http://acme.com/actions/likes> \"A new book\" ;\n"
				+ " <http://acme.com/actions/likes> \"Avocados\" .\n" + "}}";
		update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
		update.execute();
		// After the delete, no statements about Mike should remain in the graph.
		query = "select ?p ?o { GRAPH <http://updated/test> {<http://acme.com/people/Mike> ?p ?o . }}";
		tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
		tupleQuery.evaluate(resultHandler);
		log.info("Result count : " + resultHandler.getCount());
		Validate.isTrue(resultHandler.getCount() == 0);
	}
	/**
	 * Runs the two queries whose shapes match the PCJ tables created by
	 * {@link #createPCJ} and checks the expected result counts (1 and 2).
	 *
	 * @param conn open repository connection to query against
	 */
	private static void testPCJSearch(final SailRepositoryConnection conn)
			throws Exception {
		String queryString;
		TupleQuery tupleQuery;
		CountingResultHandler tupleHandler;
		// ///////////// search for bob
		queryString = "SELECT ?e ?c ?l ?o " //
				+ "{" //
				+ " ?e a ?c . "//
				+ " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
				+ " ?e <uri:talksTo> ?o . "//
				+ "}";//
		tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
		tupleHandler = new CountingResultHandler();
		tupleQuery.evaluate(tupleHandler);
		log.info("Result count : " + tupleHandler.getCount());
		Validate.isTrue(tupleHandler.getCount() == 1);
		// ///////////// search for bob
		// Same pattern with subject/class swapped in the type triple (?c a ?e).
		queryString = "PREFIX fts: <http://rdf.useekm.com/fts#> "//
				+ "SELECT ?e ?c ?l ?o " //
				+ "{" //
				+ " ?c a ?e . "//
				+ " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
				+ " ?e <uri:talksTo> ?o . "//
				+ "}";//
		tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
		tupleHandler = new CountingResultHandler();
		tupleQuery.evaluate(tupleHandler);
		log.info("Result count : " + tupleHandler.getCount());
		Validate.isTrue(tupleHandler.getCount() == 2);
	}
	/**
	 * Inserts a set of time:inXSDDateTime instants, then runs three queries
	 * using the tempo:after filter — alone, with a type constraint, and joined
	 * with the PCJ pattern — each expected to return 5 results.
	 *
	 * @param conn open repository connection used for the update and queries
	 */
	private static void testAddAndTemporalSearchWithPCJ(
			final SailRepositoryConnection conn) throws Exception {
		// create some resources and literals to make statements out of
		final String sparqlInsert = "PREFIX time: <http://www.w3.org/2006/time#>\n"
				+ "INSERT DATA {\n" //
				+ "_:eventz a time:Instant ;\n"
				+ " time:inXSDDateTime '2001-01-01T01:01:01-08:00' ;\n" // one
				// second
				+ " time:inXSDDateTime '2001-01-01T04:01:02.000-05:00'^^<http://www.w3.org/2001/XMLSchema#dateTime> ;\n" // 2
				// seconds
				+ " time:inXSDDateTime \"2001-01-01T01:01:03-08:00\" ;\n" // 3
				// seconds
				+ " time:inXSDDateTime '2001-01-01T01:01:04-08:00' ;\n" // 4
				// seconds
				+ " time:inXSDDateTime '2001-01-01T09:01:05Z' ;\n"
				+ " time:inXSDDateTime '2006-01-01' ;\n"
				+ " time:inXSDDateTime '2007-01-01' ;\n"
				+ " time:inXSDDateTime '2008-01-01' ; .\n" + "}";
		final Update update = conn.prepareUpdate(QueryLanguage.SPARQL,
				sparqlInsert);
		update.execute();
		// Find all stored dates.
		String queryString = "PREFIX time: <http://www.w3.org/2006/time#> \n"//
				+ "PREFIX tempo: <tag:rya-rdf.org,2015:temporal#> \n"//
				+ "SELECT ?event ?time \n" //
				+ "WHERE { \n"
				+ " ?event time:inXSDDateTime ?time . \n"//
				+ " FILTER(tempo:after(?time, '2001-01-01T01:01:03-08:00') ) \n"// after
				// 3
				// seconds
				+ "}";//
		TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL,
				queryString);
		CountingResultHandler tupleHandler = new CountingResultHandler();
		tupleQuery.evaluate(tupleHandler);
		log.info("Result count : " + tupleHandler.getCount());
		Validate.isTrue(tupleHandler.getCount() == 5);
		// Find all stored dates.
		// Same filter, plus an explicit "?event a time:Instant" type constraint.
		queryString = "PREFIX time: <http://www.w3.org/2006/time#> \n"//
				+ "PREFIX tempo: <tag:rya-rdf.org,2015:temporal#> \n"//
				+ "SELECT ?event ?time \n" //
				+ "WHERE { \n"
				+ " ?event time:inXSDDateTime ?time . \n"//
				+ " ?event a time:Instant . \n"//
				+ " FILTER(tempo:after(?time, '2001-01-01T01:01:03-08:00') ) \n"// after
				// 3
				// seconds
				+ "}";//
		tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
		tupleHandler = new CountingResultHandler();
		tupleQuery.evaluate(tupleHandler);
		log.info("Result count : " + tupleHandler.getCount());
		Validate.isTrue(tupleHandler.getCount() == 5);
		// Find all stored dates.
		// Temporal filter joined with the PCJ pattern (?e a ?c / label / talksTo).
		queryString = "PREFIX time: <http://www.w3.org/2006/time#> \n"//
				+ "PREFIX tempo: <tag:rya-rdf.org,2015:temporal#> \n"//
				+ "SELECT ?event ?time ?e ?c ?l ?o \n" //
				+ "WHERE { \n"
				+ " ?e a ?c . \n"//
				+ " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . \n"//
				+ " ?e <uri:talksTo> ?o . \n"//
				+ " ?event a time:Instant . \n"//
				+ " ?event time:inXSDDateTime ?time . \n"//
				+ " FILTER(tempo:after(?time, '2001-01-01T01:01:03-08:00') ) \n"// after
				// 3
				// seconds
				+ "}";//
		tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
		tupleHandler = new CountingResultHandler();
		tupleQuery.evaluate(tupleHandler);
		log.info("Result count : " + tupleHandler.getCount());
		Validate.isTrue(tupleHandler.getCount() == 5);
	}
	/**
	 * Adds two labeled Person resources (Alice and Bob), then runs four
	 * free-text (fts:text) queries — prefix match, boolean OR, conjunction of
	 * two filters, and negation joined with the PCJ pattern — checking the
	 * expected counts (1, 2, 1, 1).
	 *
	 * @param conn open repository connection used for the adds and queries
	 */
	private static void testAddAndFreeTextSearchWithPCJ(
			final SailRepositoryConnection conn) throws Exception {
		// add data to the repository using the SailRepository add methods
		final ValueFactory f = conn.getValueFactory();
		final IRI person = f.createIRI("http://example.org/ontology/Person");
		String uuid;
		uuid = "urn:people:alice";
		conn.add(f.createIRI(uuid), RDF.TYPE, person);
		conn.add(f.createIRI(uuid), RDFS.LABEL,
				f.createLiteral("Alice Palace Hose", f.createIRI("xsd:string")));
		uuid = "urn:people:bob";
		conn.add(f.createIRI(uuid), RDF.TYPE, person);
		conn.add(f.createIRI(uuid), RDFS.LABEL,
				f.createLiteral("Bob Snob Hose", f.createIRI("xsd:string")));
		String queryString;
		TupleQuery tupleQuery;
		CountingResultHandler tupleHandler;
		// ///////////// search for alice
		queryString = "PREFIX fts: <http://rdf.useekm.com/fts#> "//
				+ "SELECT ?person ?match ?e ?c ?l ?o " //
				+ "{" //
				+ " ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . "//
				+ " FILTER(fts:text(?match, \"pal*\")) " //
				+ "}";//
		tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
		tupleHandler = new CountingResultHandler();
		tupleQuery.evaluate(tupleHandler);
		log.info("Result count : " + tupleHandler.getCount());
		Validate.isTrue(tupleHandler.getCount() == 1);
		// ///////////// search for alice and bob
		queryString = "PREFIX fts: <http://rdf.useekm.com/fts#> "//
				+ "SELECT ?person ?match " //
				+ "{" //
				+ " ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . "//
				+ " ?person a <http://example.org/ontology/Person> . "//
				+ " FILTER(fts:text(?match, \"(alice | bob) *SE\")) " //
				+ "}";//
		tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
		tupleHandler = new CountingResultHandler();
		tupleQuery.evaluate(tupleHandler);
		log.info("Result count : " + tupleHandler.getCount());
		Validate.isTrue(tupleHandler.getCount() == 2);
		// ///////////// search for alice and bob
		// Two fts filters conjoined: only Alice matches both.
		queryString = "PREFIX fts: <http://rdf.useekm.com/fts#> "//
				+ "SELECT ?person ?match " //
				+ "{" //
				+ " ?person a <http://example.org/ontology/Person> . "//
				+ " ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . "//
				+ " FILTER(fts:text(?match, \"(alice | bob) *SE\")) " //
				+ " FILTER(fts:text(?match, \"pal*\")) " //
				+ "}";//
		tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
		tupleHandler = new CountingResultHandler();
		tupleQuery.evaluate(tupleHandler);
		log.info("Result count : " + tupleHandler.getCount());
		Validate.isTrue(tupleHandler.getCount() == 1);
		// ///////////// search for bob
		// Negated free-text filter joined with the PCJ pattern.
		queryString = "PREFIX fts: <http://rdf.useekm.com/fts#> "//
				+ "SELECT ?person ?match ?e ?c ?l ?o " //
				+ "{" //
				+ " ?e a ?c . "//
				+ " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
				+ " ?e <uri:talksTo> ?o . "//
				+ " ?person a <http://example.org/ontology/Person> . "//
				+ " ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . "//
				+ " FILTER(fts:text(?match, \"!alice & hose\")) " //
				+ "}";//
		tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
		tupleHandler = new CountingResultHandler();
		tupleQuery.evaluate(tupleHandler);
		log.info("Result count : " + tupleHandler.getCount());
		Validate.isTrue(tupleHandler.getCount() == 1);
	}
// private static void testAddPointAndWithinSearchWithPCJ(
// final SailRepositoryConnection conn) throws Exception {
//
// final String update = "PREFIX geo: <http://www.opengis.net/ont/geosparql#> "//
// + "INSERT DATA { " //
// + " <urn:feature> a geo:Feature ; " //
// + " geo:hasGeometry [ " //
// + " a geo:Point ; " //
// + " geo:asWKT \"Point(-77.03524 38.889468)\"^^geo:wktLiteral "//
// + " ] . " //
// + "}";
//
// final Update u = conn.prepareUpdate(QueryLanguage.SPARQL, update);
// u.execute();
//
// String queryString;
// TupleQuery tupleQuery;
// CountingResultHandler tupleHandler;
//
// // point outside search ring
// queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#> "//
// + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/> "//
// + "SELECT ?feature ?point ?wkt " //
// + "{" //
// + " ?feature a geo:Feature . "//
// + " ?feature geo:hasGeometry ?point . "//
// + " ?point a geo:Point . "//
// + " ?point geo:asWKT ?wkt . "//
// + " FILTER(geof:sfWithin(?wkt, \"POLYGON((-77 39, -76 39, -76 38, -77 38, -77 39))\"^^geo:wktLiteral)) " //
// + "}";//
// tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
// tupleHandler = new CountingResultHandler();
// tupleQuery.evaluate(tupleHandler);
// log.info("Result count : " + tupleHandler.getCount());
// Validate.isTrue(tupleHandler.getCount() == 0);
//
// // point inside search ring
// queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#> "//
// + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/> "//
// + "SELECT ?feature ?point ?wkt ?e ?l ?o" //
// + "{" //
// + " ?feature a ?e . "//
// + " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
// + " ?e <uri:talksTo> ?o . "//
// + " ?feature a geo:Feature . "//
// + " ?feature geo:hasGeometry ?point . "//
// + " ?point a geo:Point . "//
// + " ?point geo:asWKT ?wkt . "//
// + " FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral)) " //
// + "}";//
//
// tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
// tupleHandler = new CountingResultHandler();
// tupleQuery.evaluate(tupleHandler);
// log.info("Result count : " + tupleHandler.getCount());
// Validate.isTrue(tupleHandler.getCount() == 1);
//
// // point inside search ring with Pre-Computed Join
// queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#> "//
// + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/> "//
// + "SELECT ?feature ?point ?wkt ?e ?l ?o" //
// + "{" //
// + " ?feature a ?e . "//
// + " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
// + " ?e <uri:talksTo> ?o . "//
// + " ?feature a geo:Feature . "//
// + " ?feature geo:hasGeometry ?point . "//
// + " ?point a geo:Point . "//
// + " ?point geo:asWKT ?wkt . "//
// + " FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral)) " //
// + "}";//
//
// tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
// tupleHandler = new CountingResultHandler();
// tupleQuery.evaluate(tupleHandler);
// log.info("Result count : " + tupleHandler.getCount());
// Validate.isTrue(tupleHandler.getCount() >= 1); // may see points from
// // during previous runs
//
// // point outside search ring with PCJ
// queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#> "//
// + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/> "//
// + "SELECT ?feature ?point ?wkt ?e ?l ?o " //
// + "{" //
// + " ?feature a ?e . "//
// + " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
// + " ?e <uri:talksTo> ?o . "//
// + " ?feature a geo:Feature . "//
// + " ?feature geo:hasGeometry ?point . "//
// + " ?point a geo:Point . "//
// + " ?point geo:asWKT ?wkt . "//
// + " FILTER(geof:sfWithin(?wkt, \"POLYGON((-77 39, -76 39, -76 38, -77 38, -77 39))\"^^geo:wktLiteral)) " //
// + "}";//
// tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
// tupleHandler = new CountingResultHandler();
// tupleQuery.evaluate(tupleHandler);
// log.info("Result count : " + tupleHandler.getCount());
// Validate.isTrue(tupleHandler.getCount() == 0);
//
// // point inside search ring with different Pre-Computed Join
// queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#> "//
// + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/> "//
// + "SELECT ?feature ?point ?wkt ?e ?c ?l ?o " //
// + "{" //
// + " ?e a ?c . "//
// + " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
// + " ?e <uri:talksTo> ?o . "//
// + " ?feature a geo:Feature . "//
// + " ?feature geo:hasGeometry ?point . "//
// + " ?point a geo:Point . "//
// + " ?point geo:asWKT ?wkt . "//
// + " FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral)) " //
// + "}";//
// tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
// tupleHandler = new CountingResultHandler();
// tupleQuery.evaluate(tupleHandler);
// log.info("Result count : " + tupleHandler.getCount());
// Validate.isTrue(tupleHandler.getCount() == 1);
// }
//
// private static void testTemporalFreeGeoSearch(
// final SailRepositoryConnection conn)
// throws MalformedQueryException, RepositoryException,
// UpdateExecutionException, TupleQueryResultHandlerException,
// QueryEvaluationException {
//
// String queryString;
// TupleQuery tupleQuery;
// CountingResultHandler tupleHandler;
//
// // ring containing point
// queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#> "//
// + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/> "//
// + "PREFIX time: <http://www.w3.org/2006/time#> "//
// + "PREFIX tempo: <tag:rya-rdf.org,2015:temporal#> "//
// + "PREFIX fts: <http://rdf.useekm.com/fts#> "//
// + "SELECT ?feature ?point ?wkt ?event ?time ?person ?match" //
// + "{" //
// + " ?event a time:Instant . \n"//
// + " ?event time:inXSDDateTime ?time . \n"//
// + " FILTER(tempo:after(?time, '2001-01-01T01:01:03-08:00') ) \n"// after
// // 3
// // seconds
// + " ?feature a geo:Feature . "//
// + " ?feature geo:hasGeometry ?point . "//
// + " ?point a geo:Point . "//
// + " ?point geo:asWKT ?wkt . "//
// + " FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral)). " //
// + " ?person a <http://example.org/ontology/Person> . "//
// + " ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . "//
// + " FILTER(fts:text(?match, \"pal*\")) " //
// + "}";//
//
// tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
//
// tupleHandler = new CountingResultHandler();
// tupleQuery.evaluate(tupleHandler);
// log.info("Result count : " + tupleHandler.getCount());
// Validate.isTrue(tupleHandler.getCount() == 5);
//
// }
//
// private static void testGeoFreetextWithPCJSearch(
// final SailRepositoryConnection conn)
// throws MalformedQueryException, RepositoryException,
// TupleQueryResultHandlerException, QueryEvaluationException {
// // ring outside point
// final String queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#> "//
// + "PREFIX fts: <http://rdf.useekm.com/fts#> "//
// + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/> "//
// + "SELECT ?feature ?point ?wkt ?e ?c ?l ?o ?person ?match " //
// + "{" //
// + " ?person a <http://example.org/ontology/Person> . "//
// + " ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . "//
// + " FILTER(fts:text(?match, \"!alice & hose\")) " //
// + " ?e a ?c . "//
// + " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
// + " ?e <uri:talksTo> ?o . "//
// + " ?feature a geo:Feature . "//
// + " ?feature geo:hasGeometry ?point . "//
// + " ?point a geo:Point . "//
// + " ?point geo:asWKT ?wkt . "//
// + " FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral)) " //
// + "}";//
// final TupleQuery tupleQuery = conn.prepareTupleQuery(
// QueryLanguage.SPARQL, queryString);
// final CountingResultHandler tupleHandler = new CountingResultHandler();
// tupleQuery.evaluate(tupleHandler);
// log.info("Result count : " + tupleHandler.getCount());
// Validate.isTrue(tupleHandler.getCount() == 1);
// }
	/**
	 * Deletes every time:inXSDDateTime statement, then re-runs the tempo:after
	 * query from the temporal example and checks that 0 results remain.
	 *
	 * @param conn open repository connection used for the delete and the query
	 */
	private static void testDeleteTemporalData(
			final SailRepositoryConnection conn) throws Exception {
		// Delete all stored dates
		final String sparqlDelete = "PREFIX time: <http://www.w3.org/2006/time#>\n"
				+ "PREFIX tempo: <tag:rya-rdf.org,2015:temporal#> \n"//
				+ "DELETE {\n" //
				+ " ?event time:inXSDDateTime ?time . \n"
				+ "}\n"
				+ "WHERE { \n" + " ?event time:inXSDDateTime ?time . \n"//
				+ "}";//
		final Update deleteUpdate = conn.prepareUpdate(QueryLanguage.SPARQL,
				sparqlDelete);
		deleteUpdate.execute();
		// Find all stored dates.
		final String queryString = "PREFIX time: <http://www.w3.org/2006/time#> \n"//
				+ "PREFIX tempo: <tag:rya-rdf.org,2015:temporal#> \n"//
				+ "SELECT ?event ?time \n" //
				+ "WHERE { \n"
				+ " ?event time:inXSDDateTime ?time . \n"//
				+ " FILTER(tempo:after(?time, '2001-01-01T01:01:03-08:00') ) \n"// after
				// 3
				// seconds
				+ "}";//
		final CountingResultHandler tupleHandler = new CountingResultHandler();
		final TupleQuery tupleQuery = conn.prepareTupleQuery(
				QueryLanguage.SPARQL, queryString);
		tupleQuery.evaluate(tupleHandler);
		log.info("Result count : " + tupleHandler.getCount());
		Validate.isTrue(tupleHandler.getCount() == 0);
	}
	/**
	 * Removes the Alice and Bob statements added by the free-text example
	 * (types and labels), then verifies no labeled Person remains.
	 *
	 * @param conn open repository connection used for the removes and the query
	 */
	private static void testDeleteFreeTextData(
			final SailRepositoryConnection conn) throws Exception {
		// Delete data from the repository using the SailRepository remove
		// methods
		final ValueFactory f = conn.getValueFactory();
		final IRI person = f.createIRI("http://example.org/ontology/Person");
		String uuid;
		uuid = "urn:people:alice";
		conn.remove(f.createIRI(uuid), RDF.TYPE, person);
		conn.remove(f.createIRI(uuid), RDFS.LABEL,
				f.createLiteral("Alice Palace Hose", f.createIRI("xsd:string")));
		uuid = "urn:people:bob";
		conn.remove(f.createIRI(uuid), RDF.TYPE, person);
		conn.remove(f.createIRI(uuid), RDFS.LABEL,
				f.createLiteral("Bob Snob Hose", f.createIRI("xsd:string")));
		// Also remove the generic label attached to the Person class itself.
		conn.remove(person, RDFS.LABEL, f.createLiteral("label", f.createIRI("xsd:string")));
		String queryString;
		TupleQuery tupleQuery;
		CountingResultHandler tupleHandler;
		// Find all
		queryString = "PREFIX fts: <http://rdf.useekm.com/fts#> "//
				+ "SELECT ?person ?match " //
				+ "{" //
				+ " ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . "//
				+ " ?person a <http://example.org/ontology/Person> . "//
				+ "}";//
		tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
		tupleHandler = new CountingResultHandler();
		tupleQuery.evaluate(tupleHandler);
		log.info("Result count : " + tupleHandler.getCount());
		Validate.isTrue(tupleHandler.getCount() == 0);
	}
// private static void testDeleteGeoData(final SailRepositoryConnection conn)
// throws Exception {
// // Delete all stored points
// final String sparqlDelete = "PREFIX geo: <http://www.opengis.net/ont/geosparql#> "//
// + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/> "//
// + "DELETE {\n" //
// + " ?feature a geo:Feature . "//
// + " ?feature geo:hasGeometry ?point . "//
// + " ?point a geo:Point . "//
// + " ?point geo:asWKT ?wkt . "//
// + "}\n" + "WHERE { \n" + " ?feature a geo:Feature . "//
// + " ?feature geo:hasGeometry ?point . "//
// + " ?point a geo:Point . "//
// + " ?point geo:asWKT ?wkt . "//
// + "}";//
//
// final Update deleteUpdate = conn.prepareUpdate(QueryLanguage.SPARQL,
// sparqlDelete);
// deleteUpdate.execute();
//
// String queryString;
// TupleQuery tupleQuery;
// CountingResultHandler tupleHandler;
//
// // Find all stored points
// queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#> "//
// + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/> "//
// + "SELECT ?feature ?point ?wkt " //
// + "{" //
// + " ?feature a geo:Feature . "//
// + " ?feature geo:hasGeometry ?point . "//
// + " ?point a geo:Point . "//
// + " ?point geo:asWKT ?wkt . "//
// + "}";//
// tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
// tupleHandler = new CountingResultHandler();
// tupleQuery.evaluate(tupleHandler);
// log.info("Result count : " + tupleHandler.getCount());
// Validate.isTrue(tupleHandler.getCount() == 0);
// }
	/**
	 * Creates and populates the two PCJ index tables queried by
	 * {@link #testPCJSearch}: a temporary repository (with PCJ use disabled to
	 * avoid recursion) is loaded with a few statements, then each query's
	 * results are materialized into an Accumulo PCJ table via {@link PcjTables}.
	 *
	 * @param conf base configuration; copied so the USE_PCJ flag can be
	 *             overridden without affecting the caller's config
	 */
	private static void createPCJ(final Configuration conf)
			throws RepositoryException, AccumuloException,
			AccumuloSecurityException, TableExistsException, PcjException,
			InferenceEngineException, NumberFormatException,
			UnknownHostException, SailException, TableNotFoundException {
		final Configuration config = new AccumuloRdfConfiguration(conf);
		config.set(ConfigUtils.USE_PCJ, "false");
		SailRepository repository = null;
		SailRepositoryConnection conn = null;
		try {
			final Sail extSail = RyaSailFactory.getInstance(config);
			repository = new SailRepository(extSail);
			conn = repository.getConnection();
			// Query with the type triple reversed (?c a ?e) — matches 1 result.
			final String queryString1 = ""//
					+ "SELECT ?e ?c ?l ?o " //
					+ "{" //
					+ " ?c a ?e . "//
					+ " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
					+ " ?e <uri:talksTo> ?o . "//
					+ "}";//
			// Standard pattern (?e a ?c) — matches 2 results.
			final String queryString2 = ""//
					+ "SELECT ?e ?c ?l ?o " //
					+ "{" //
					+ " ?e a ?c . "//
					+ " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
					+ " ?e <uri:talksTo> ?o . "//
					+ "}";//
			ValueFactory vf = SimpleValueFactory.getInstance();
			IRI obj, subclass, talksTo;
			final IRI person = vf.createIRI("urn:people:alice");
			final IRI feature = vf.createIRI("urn:feature");
			final IRI sub = vf.createIRI("uri:entity");
			subclass = vf.createIRI("uri:class");
			obj = vf.createIRI("uri:obj");
			talksTo = vf.createIRI("uri:talksTo");
			// Seed statements the PCJ queries will match against.
			conn.add(person, RDF.TYPE, sub);
			conn.add(feature, RDF.TYPE, sub);
			conn.add(sub, RDF.TYPE, subclass);
			conn.add(sub, RDFS.LABEL, vf.createLiteral("label"));
			conn.add(sub, talksTo, obj);
			final String tablename1 = RYA_TABLE_PREFIX + "INDEX_1";
			final String tablename2 = RYA_TABLE_PREFIX + "INDEX_2";
			final Connector accCon = new MockInstance(INSTANCE).getConnector(
					"root", new PasswordToken("".getBytes(StandardCharsets.UTF_8)));
			new PcjTables().createAndPopulatePcj(conn, accCon, tablename1,
					queryString1, new String[] { "e", "c", "l", "o" },
					Optional.absent());
			new PcjTables().createAndPopulatePcj(conn, accCon, tablename2,
					queryString2, new String[] { "e", "c", "l", "o" },
					Optional.absent());
		} catch (final RyaDAOException e) {
			throw new Error("While creating PCJ tables.",e);
		} finally {
			closeQuietly(conn);
			closeQuietly(repository);
		}
	}
private static class CountingResultHandler implements
TupleQueryResultHandler {
private int count = 0;
public int getCount() {
return count;
}
public void resetCount() {
count = 0;
}
@Override
public void startQueryResult(final List<String> arg0)
throws TupleQueryResultHandlerException {
}
@Override
public void handleSolution(final BindingSet arg0)
throws TupleQueryResultHandlerException {
count++;
System.out.println(arg0);
}
@Override
public void endQueryResult() throws TupleQueryResultHandlerException {
}
@Override
public void handleBoolean(final boolean arg0)
throws QueryResultHandlerException {
}
@Override
public void handleLinks(final List<String> arg0)
throws QueryResultHandlerException {
}
}
}
|
apache/drill | 34,778 | exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/convert/TestDirectConverter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.physical.impl.scan.convert;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.math.BigDecimal;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.util.HashMap;
import java.util.Map;
import org.apache.drill.common.exceptions.UserException;
import org.apache.drill.common.types.TypeProtos.DataMode;
import org.apache.drill.common.types.TypeProtos.MinorType;
import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.physical.impl.scan.convert.StandardConversions.ConversionDefn;
import org.apache.drill.exec.physical.impl.scan.convert.StandardConversions.ConversionType;
import org.apache.drill.exec.physical.impl.scan.v3.FixedReceiver;
import org.apache.drill.exec.physical.rowSet.RowSet;
import org.apache.drill.exec.physical.rowSet.RowSet.SingleRowSet;
import org.apache.drill.exec.physical.rowSet.RowSetBuilder;
import org.apache.drill.exec.physical.rowSet.RowSetTestUtils;
import org.apache.drill.exec.physical.rowSet.RowSetWriter;
import org.apache.drill.exec.record.metadata.ColumnMetadata;
import org.apache.drill.exec.record.metadata.SchemaBuilder;
import org.apache.drill.exec.record.metadata.TupleMetadata;
import org.apache.drill.exec.record.metadata.TupleNameSpace;
import org.apache.drill.exec.vector.accessor.InvalidConversionError;
import org.apache.drill.exec.vector.accessor.ScalarWriter;
import org.apache.drill.exec.vector.accessor.ValueWriter;
import org.apache.drill.test.SubOperatorTest;
import org.apache.drill.test.rowSet.RowSetUtilities;
import org.joda.time.Period;
import org.junit.Test;
public class TestDirectConverter extends SubOperatorTest {
/**
 * Mock column conversion factory that takes an input schema, matches it against
 * the given writer, and inserts a standard type conversion shim.
 */
private static class ConversionTestFixture {

  // Writer for the output (vector-side) row set.
  private final RowSetWriter rowWriter;
  // Lazily-created conversion factory; see conversions().
  private StandardConversions conversions;
  // One converter per input column, in input-schema order.
  private final TupleNameSpace<ValueWriter> rowFormat = new TupleNameSpace<>();

  public ConversionTestFixture(BufferAllocator allocator, TupleMetadata outputSchema) {
    rowWriter = RowSetTestUtils.makeWriter(allocator, outputSchema);
  }

  /**
   * Use a conversion factory configured with the given framework-level
   * properties. Must be called before converters are created.
   */
  public ConversionTestFixture withProperties(Map<String,String> props) {
    conversions = StandardConversions.builder().withProperties(props).build();
    return this;
  }

  // Returns the configured factory, creating a default one on first use.
  private StandardConversions conversions() {
    if (conversions == null) {
      conversions = StandardConversions.builder().build();
    }
    return conversions;
  }

  public void createConvertersFor(TupleMetadata inputSchema) {
    for (ColumnMetadata inputCol : inputSchema) {
      addColumn(inputCol);
    }
  }

  public void addColumn(ColumnMetadata source) {
    ScalarWriter colWriter = rowWriter.scalar(source.name());
    // Test uses simple row writer; no support for adding columns.
    assertNotNull(colWriter);
    ValueWriter converter = conversions().converterFor(colWriter, source);
    assertNotNull(converter);
    rowFormat.add(source.name(), converter);
  }

  /**
   * Writes one row through the converters. Requires exactly one value per
   * converter: the previous {@code <=} check accepted short rows, which the
   * loop below would then overrun with an ArrayIndexOutOfBoundsException.
   */
  public ConversionTestFixture addRow(Object...cols) {
    assertEquals(rowFormat.count(), cols.length);
    for (int i = 0; i < rowFormat.count(); i++) {
      rowFormat.get(i).setValue(cols[i]);
    }
    rowWriter.save();
    return this;
  }

  public ConversionTestFixture addSingleCol(Object col) {
    rowFormat.get(0).setValue(col);
    rowWriter.save();
    return this;
  }

  public RowSet build() {
    return rowWriter.done();
  }
}
@Test
public void testSchemaMerge() {
// Merging a provided (output) schema with a reader schema:
// - columns in both keep the provided type ("a", "b");
// - reader-only columns pass through unchanged ("d");
// - provided-only columns are dropped ("c").
TupleMetadata providedSchema = new SchemaBuilder()
.add("a", MinorType.INT)
.addNullable("b", MinorType.INT)
.add("c", MinorType.INT)
.build();
providedSchema.setBooleanProperty("foo", true);
TupleMetadata readerSchema = new SchemaBuilder()
.add("a", MinorType.VARCHAR)
.add("b", MinorType.VARCHAR)
.add("d", MinorType.VARCHAR)
.build();
TupleMetadata expected = new SchemaBuilder()
.add("a", MinorType.INT)
.addNullable("b", MinorType.INT)
.add("d", MinorType.VARCHAR)
.build();
TupleMetadata mergedSchema = FixedReceiver.Builder.mergeSchemas(providedSchema, readerSchema);
assertTrue(expected.isEquivalent(mergedSchema));
// Schema-level properties from the provided schema survive the merge.
assertTrue(mergedSchema.booleanProperty("foo"));
}
/**
* Test the standard string-to-type conversion using an ad-hoc conversion
* from the input type (the type used by the row set builder) to the output
* (vector) type.
*/
@Test
public void testStringToNumberConversion() {
// Create the input and output schemas
TupleMetadata outputSchema = new SchemaBuilder()
.add("ti", MinorType.TINYINT)
.add("si", MinorType.SMALLINT)
.add("int", MinorType.INT)
.add("bi", MinorType.BIGINT)
.add("fl", MinorType.FLOAT4)
.add("db", MinorType.FLOAT8)
.buildSchema();
TupleMetadata inputSchema = new SchemaBuilder()
.add("ti", MinorType.VARCHAR)
.add("si", MinorType.VARCHAR)
.add("int", MinorType.VARCHAR)
.add("bi", MinorType.VARCHAR)
.add("fl", MinorType.VARCHAR)
.add("db", MinorType.VARCHAR)
.buildSchema();
// Load test data using converters
ConversionTestFixture testFixture = new ConversionTestFixture(fixture.allocator(), outputSchema);
testFixture.createConvertersFor(inputSchema);
// Second row probes the upper range of each numeric target type.
RowSet actual = testFixture
.addRow("11", "12", "13", "14", "15.5", "16.25")
.addRow("127", "32757", Integer.toString(Integer.MAX_VALUE),
Long.toString(Long.MAX_VALUE), "10E6", "10E200")
.build();
// Build the expected vector without a type converter.
final SingleRowSet expected = fixture.rowSetBuilder(outputSchema)
.addRow(11, 12, 13, 14L, 15.5F, 16.25D)
.addRow(127, 32757, Integer.MAX_VALUE, Long.MAX_VALUE, 10E6F, 10E200D)
.build();
// Compare
RowSetUtilities.verify(expected, actual);
}
/**
* Test the standard string-to-type conversion using an ad-hoc conversion
* from the input type (the type used by the row set builder) to the output
* (vector) type.
*/
@Test
public void testNumberToStringConversion() {
// Create the schema
TupleMetadata outputSchema = new SchemaBuilder()
.add("ti", MinorType.VARCHAR)
.add("si", MinorType.VARCHAR)
.add("int", MinorType.VARCHAR)
.add("bi", MinorType.VARCHAR)
.add("fl", MinorType.VARCHAR)
.add("db", MinorType.VARCHAR)
.buildSchema();
TupleMetadata inputSchema = new SchemaBuilder()
.add("ti", MinorType.TINYINT)
.add("si", MinorType.SMALLINT)
.add("int", MinorType.INT)
.add("bi", MinorType.BIGINT)
.add("fl", MinorType.FLOAT4)
.add("db", MinorType.FLOAT8)
.buildSchema();
// The setObject() method won't do the Float to Double conversion,
// so values before are provided as doubles in the float case.
ConversionTestFixture testFixture = new ConversionTestFixture(fixture.allocator(), outputSchema);
testFixture.createConvertersFor(inputSchema);
RowSet actual = testFixture
.addRow(11, 12, 13, 14L, 15.5D, 16.25D)
.addRow(127, 32757, Integer.MAX_VALUE, Long.MAX_VALUE, 10E6D, 10E200D)
.build();
// Build the expected vector without a type converter.
// Large float/double values render in scientific notation ("1.0E7").
final SingleRowSet expected = fixture.rowSetBuilder(outputSchema)
.addRow("11", "12", "13", "14", "15.5", "16.25")
.addRow("127", "32757", Integer.toString(Integer.MAX_VALUE),
Long.toString(Long.MAX_VALUE), "1.0E7", "1.0E201")
.build();
// Compare
RowSetUtilities.verify(expected, actual);
}
/**
 * A non-numeric string fed through the string-to-INT converter must
 * raise {@link InvalidConversionError}.
 */
@Test
public void testStringToNumberConversionError() {
  TupleMetadata outputSchema = new SchemaBuilder()
      .add("int", MinorType.INT)
      .buildSchema();
  TupleMetadata inputSchema = new SchemaBuilder()
      .add("int", MinorType.VARCHAR)
      .buildSchema();
  ConversionTestFixture conversionFixture =
      new ConversionTestFixture(fixture.allocator(), outputSchema);
  conversionFixture.createConvertersFor(inputSchema);
  boolean threw = false;
  try {
    conversionFixture.addRow("foo");
  } catch (InvalidConversionError e) {
    threw = true;
  } finally {
    // Free the underlying vectors regardless of outcome.
    conversionFixture.build().clear();
  }
  assertTrue(threw);
}
/**
 * Shorthand to build a scale-0 {@link BigDecimal} from an int.
 */
public static BigDecimal dec(int value) {
  return BigDecimal.valueOf(value);
}
/**
 * Tests the implicit conversions provided by the column writer itself.
 * No conversion mechanism is needed in this case.
 */
@Test
public void testImplicitConversion() {
  TupleMetadata schema = new SchemaBuilder()
      .add("ti", MinorType.TINYINT)
      .add("si", MinorType.SMALLINT)
      .add("int", MinorType.INT)
      .add("bi", MinorType.BIGINT)
      .add("fl", MinorType.FLOAT4)
      .add("db", MinorType.FLOAT8)
      .add("dec", MinorType.VARDECIMAL, 10, 0)
      .buildSchema();
  // Test allowed implicit conversions: each row feeds every column from a
  // single source type (int, long, float, double, decimal).
  RowSet actual = new RowSetBuilder(fixture.allocator(), schema)
      .addRow(11, 12, 13, 14, 15, 16, 17) // int
      .addRow(21L, 22L, 23L, 24L, 25L, 26L, 27L) // long
      .addRow(31F, 32F, 33F, 34F, 35F, 36F, 37F) // float
      .addRow(41D, 42D, 43D, 44D, 45D, 46D, 47D) // double
      .addRow(dec(51), dec(52), dec(53), dec(54), dec(55), dec(56), dec(57)) // decimal
      .build();
  // Expected values expressed in each column's natural type. The FLOAT4
  // value in the decimal row was previously written as 55L, which made the
  // baseline itself depend on the implicit long-to-float conversion under
  // test; 55F matches the other rows.
  final SingleRowSet expected = fixture.rowSetBuilder(schema)
      .addRow(11, 12, 13, 14L, 15F, 16D, dec(17))
      .addRow(21, 22, 23, 24L, 25F, 26D, dec(27))
      .addRow(31, 32, 33, 34L, 35F, 36D, dec(37))
      .addRow(41, 42, 43, 44L, 45F, 46D, dec(47))
      .addRow(51, 52, 53, 54L, 55F, 56D, dec(57))
      .build();
  RowSetUtilities.verify(expected, actual);
}
/**
* The column accessors provide only int setters. For performance, the int value is
* assumed to be of the correct range for the target column. If not, truncation of
* the highest bytes occurs.
* <p>
* The assumption is, if the reader or other code expects that overflow might
* occur, that code should be implemented in the client (or in a type conversion
* shim), leaving the normal code path to optimize for the 99% of the cases where
* the value is in the proper range.
*/
@Test
public void testImplicitConversionIntTruncation() {
TupleMetadata schema = new SchemaBuilder()
.add("ti", MinorType.TINYINT)
.add("si", MinorType.SMALLINT)
.buildSchema();
// Test allowed implicit conversions.
RowSet actual = new RowSetBuilder(fixture.allocator(), schema)
.addRow(Byte.MAX_VALUE + 1, Short.MAX_VALUE + 1)
.addRow(Byte.MAX_VALUE + 2, Short.MAX_VALUE + 2)
.build();
// Build the expected vector without a type converter.
// Dropping the high-order bytes wraps MAX_VALUE + 1 around to MIN_VALUE.
final SingleRowSet expected = fixture.rowSetBuilder(schema)
.addRow(Byte.MIN_VALUE, Short.MIN_VALUE)
.addRow(Byte.MIN_VALUE + 1, Short.MIN_VALUE + 1)
.build();
RowSetUtilities.verify(expected, actual);
}
/**
 * Overflow from double-to-int (and long-to-int) is detected.
 */
@Test
public void testImplicitConversionIntOverflow() {
  TupleMetadata schema = new SchemaBuilder()
      .add("int", MinorType.INT)
      .buildSchema();
  // Both a long and a double one past Integer.MAX_VALUE must be rejected.
  assertIntOverflow(schema, (long) Integer.MAX_VALUE + 1);
  assertIntOverflow(schema, (double) Integer.MAX_VALUE + 1);
}

/**
 * Writes a single-column row and asserts that the implicit conversion to
 * INT rejects the out-of-range value. Frees the underlying vectors whether
 * or not the expected error occurs.
 */
private void assertIntOverflow(TupleMetadata schema, Object value) {
  RowSetBuilder builder = new RowSetBuilder(fixture.allocator(), schema);
  try {
    builder.addRow(value);
    fail();
  } catch (InvalidConversionError e) {
    // Expected
  } finally {
    builder.build().clear();
  }
}
/**
* Implicit conversion from double (or float) follows the Java Math.round
* rules: round to the closest long value. Readers that want other behavior
* should insert a type-conversion shim to implement the preferred rules.
*/
@Test
public void testImplicitConversionDoubleClamp() {
TupleMetadata schema = new SchemaBuilder()
.add("bi", MinorType.BIGINT)
.buildSchema();
RowSet actual = new RowSetBuilder(fixture.allocator(), schema)
.addRow(Long.MAX_VALUE * 10D) // beyond long range: clamps high
.addRow(Double.NaN) // NaN maps to 0
.addRow(Double.MAX_VALUE) // clamps high
.addRow(Double.MIN_VALUE) // smallest positive double rounds to 0
.addRow(Double.POSITIVE_INFINITY) // clamps high
.addRow(Double.NEGATIVE_INFINITY) // clamps low
.build();
final SingleRowSet expected = fixture.rowSetBuilder(schema)
.addRow(Long.MAX_VALUE)
.addRow(0)
.addRow(Long.MAX_VALUE)
.addRow(0)
.addRow(Long.MAX_VALUE)
.addRow(Long.MIN_VALUE)
.build();
RowSetUtilities.verify(expected, actual);
}
/**
* Implicit conversion from String to period using default ISO
* format.
*/
@Test
public void testStringToInterval() {
TupleMetadata outputSchema = new SchemaBuilder()
.add("id", MinorType.INTERVALDAY)
.add("iy", MinorType.INTERVALYEAR)
.add("int", MinorType.INTERVAL)
.buildSchema();
TupleMetadata inputSchema = new SchemaBuilder()
.add("id", MinorType.VARCHAR)
.add("iy", MinorType.VARCHAR)
.add("int", MinorType.VARCHAR)
.buildSchema();
ConversionTestFixture testFixture = new ConversionTestFixture(fixture.allocator(), outputSchema);
testFixture.createConvertersFor(inputSchema);
// ISO-8601 period strings: day-time, year-month, and the combination.
RowSet actual = testFixture
.addRow("P2DT3H4M5S", "P9Y8M", "P9Y8M2DT3H4M5S")
.build();
Period p1 = Period.days(2).plusHours(3).plusMinutes(4).plusSeconds(5);
Period p2 = Period.years(9).plusMonths(8);
Period p3 = p1.plus(p2);
final SingleRowSet expected = fixture.rowSetBuilder(outputSchema)
.addRow(p1, p2, p3)
.build();
RowSetUtilities.verify(expected, actual);
}
/**
 * Converts the three interval flavors to their ISO-8601 string forms —
 * the inverse of {@code testStringToInterval}.
 */
@Test
public void testIntervalToString() {
  // Source columns carry the three interval flavors; output is VARCHAR.
  TupleMetadata inputSchema = new SchemaBuilder()
      .add("id", MinorType.INTERVALDAY)
      .add("iy", MinorType.INTERVALYEAR)
      .add("int", MinorType.INTERVAL)
      .buildSchema();
  TupleMetadata outputSchema = new SchemaBuilder()
      .add("id", MinorType.VARCHAR)
      .add("iy", MinorType.VARCHAR)
      .add("int", MinorType.VARCHAR)
      .buildSchema();
  // A day-time period, a year-month period, and their combination.
  Period dayTime = Period.days(2).plusHours(3).plusMinutes(4).plusSeconds(5);
  Period yearMonth = Period.years(9).plusMonths(8);
  Period combined = dayTime.plus(yearMonth);
  ConversionTestFixture conversionFixture =
      new ConversionTestFixture(fixture.allocator(), outputSchema);
  conversionFixture.createConvertersFor(inputSchema);
  RowSet actual = conversionFixture
      .addRow(dayTime, yearMonth, combined)
      .build();
  // Each interval renders in ISO-8601 period notation.
  final SingleRowSet expected = fixture.rowSetBuilder(outputSchema)
      .addRow("P2DT3H4M5S", "P9Y8M", "P9Y8M2DT3H4M5S")
      .build();
  RowSetUtilities.verify(expected, actual);
}
/**
* Test VARCHAR to DATE, TIME and TIMESTAMP conversion
* using default ISO formats.
*/
@Test
public void testStringToDateTimeDefault() {
TupleMetadata outputSchema = new SchemaBuilder()
.add("date", MinorType.DATE)
.add("time", MinorType.TIME)
.add("ts", MinorType.TIMESTAMP)
.buildSchema();
TupleMetadata inputSchema = new SchemaBuilder()
.add("date", MinorType.VARCHAR)
.add("time", MinorType.VARCHAR)
.add("ts", MinorType.VARCHAR)
.buildSchema();
ConversionTestFixture testFixture = new ConversionTestFixture(fixture.allocator(), outputSchema);
testFixture.createConvertersFor(inputSchema);
// ISO-8601 literals; no format properties are set on the columns.
RowSet actual = testFixture
.addRow("2019-03-28", "12:34:56", "2019-03-28T12:34:56")
.build();
LocalTime lt = LocalTime.of(12, 34, 56);
LocalDate ld = LocalDate.of(2019, 3, 28);
// The zone-less timestamp string is expected to parse as UTC.
Instant ts = LocalDateTime.of(ld, lt).toInstant(ZoneOffset.UTC);
final SingleRowSet expected = fixture.rowSetBuilder(outputSchema)
.addRow(ld, lt, ts)
.build();
RowSetUtilities.verify(expected, actual);
}
@Test
public void testStringToDateTimeCustom() {
TupleMetadata outputSchema = new SchemaBuilder()
.add("date", MinorType.DATE)
.add("time", MinorType.TIME)
.add("ts", MinorType.TIMESTAMP)
.buildSchema();
// Per-column format properties override the default ISO parse formats.
outputSchema.metadata("date").setFormat("M/d/yyyy");
outputSchema.metadata("time").setFormat("hh:mm:ss a");
outputSchema.metadata("ts").setFormat("M/d/yyyy hh:mm:ss a VV");
TupleMetadata inputSchema = new SchemaBuilder()
.add("date", MinorType.VARCHAR)
.add("time", MinorType.VARCHAR)
.add("ts", MinorType.VARCHAR)
.buildSchema();
ConversionTestFixture testFixture = new ConversionTestFixture(fixture.allocator(), outputSchema);
testFixture.createConvertersFor(inputSchema);
// "Z" satisfies the zone-id (VV) element of the timestamp format.
RowSet actual = testFixture
.addRow("3/28/2019", "12:34:56 PM", "3/28/2019 12:34:56 PM Z")
.build();
LocalTime lt = LocalTime.of(12, 34, 56);
LocalDate ld = LocalDate.of(2019, 3, 28);
Instant ts = LocalDateTime.of(ld, lt).toInstant(ZoneOffset.UTC);
final SingleRowSet expected = fixture.rowSetBuilder(outputSchema)
.addRow(ld, lt, ts)
.build();
RowSetUtilities.verify(expected, actual);
}
@Test
public void testDateTimeToString() {
TupleMetadata outputSchema = new SchemaBuilder()
.add("date", MinorType.VARCHAR)
.add("time", MinorType.VARCHAR)
.add("ts", MinorType.VARCHAR)
.buildSchema();
TupleMetadata inputSchema = new SchemaBuilder()
.add("date", MinorType.DATE)
.add("time", MinorType.TIME)
.add("ts", MinorType.TIMESTAMP)
.buildSchema();
LocalTime lt = LocalTime.of(12, 34, 56);
LocalDate ld = LocalDate.of(2019, 3, 28);
// NOTE(review): the instant is built in the system default zone, unlike
// the UTC-based string-to-timestamp tests above — presumably the
// timestamp-to-string conversion renders in local time; confirm against
// the converter implementation.
Instant ts = LocalDateTime.of(ld, lt).atZone(ZoneId.systemDefault()).toInstant();
ConversionTestFixture testFixture = new ConversionTestFixture(fixture.allocator(), outputSchema);
testFixture.createConvertersFor(inputSchema);
RowSet actual = testFixture
.addRow(ld, lt, ts)
.build();
final SingleRowSet expected = fixture.rowSetBuilder(outputSchema)
.addRow("2019-03-28", "12:34:56", "2019-03-28T12:34:56")
.build();
RowSetUtilities.verify(expected, actual);
}
/**
* Test conversion to/from Java-style Booleans.
*/
@Test
public void testBooleanToFromString() {
TupleMetadata outputSchema = new SchemaBuilder()
.add("bool", MinorType.BIT)
.add("str", MinorType.VARCHAR)
.buildSchema();
TupleMetadata inputSchema = new SchemaBuilder()
.add("bool", MinorType.VARCHAR)
.add("str", MinorType.BIT)
.buildSchema();
ConversionTestFixture testFixture = new ConversionTestFixture(fixture.allocator(), outputSchema);
testFixture.createConvertersFor(inputSchema);
// Parsing accepts either case; output renders lower-case "true"/"false".
RowSet actual = testFixture
.addRow("true", false)
.addRow("false", true)
.addRow("TRUE", false)
.addRow("FALSE", true)
.build();
final SingleRowSet expected = fixture.rowSetBuilder(outputSchema)
.addRow(true, "false")
.addRow(false, "true")
.addRow(true, "false")
.addRow(false, "true")
.build();
RowSetUtilities.verify(expected, actual);
}
/**
 * Shorthand to build a {@link BigDecimal} from its string representation,
 * keeping the scale implied by the text.
 */
private static BigDecimal dec(String text) {
  return new BigDecimal(text);
}
@Test
public void testDecimalFromString() {
// Output column is DECIMAL(4, 2): precision 4, scale 2.
TupleMetadata outputSchema = new SchemaBuilder()
.add("id", MinorType.INT)
.add("dec", MinorType.VARDECIMAL, 4, 2)
.buildSchema();
TupleMetadata inputSchema = new SchemaBuilder()
.add("id", MinorType.INT)
.add("dec", MinorType.VARCHAR)
.buildSchema();
ConversionTestFixture testFixture = new ConversionTestFixture(fixture.allocator(), outputSchema);
testFixture.createConvertersFor(inputSchema);
RowSet actual = testFixture
.addRow(1, "0")
.addRow(2, "-0")
.addRow(3, "0.12")
.addRow(4, "1.23")
.addRow(5, "12.34")
// Rounding occurs for VARDECIMAL
.addRow(6, "23.456")
.addRow(7, "-99.99")
.build();
final SingleRowSet expected = fixture.rowSetBuilder(outputSchema)
.addRow(1, dec("0"))
.addRow(2, dec("-0"))
.addRow(3, dec("0.12"))
.addRow(4, dec("1.23"))
.addRow(5, dec("12.34"))
.addRow(6, dec("23.46")) // "23.456" rounded to the column's scale of 2
.addRow(7, dec("-99.99"))
.build();
RowSetUtilities.verify(expected, actual);
}
/**
 * A decimal string whose integer digits exceed the output column's
 * precision must raise a {@link UserException}.
 */
@Test
public void testDecimalOverflow() {
  // Output decimal is DECIMAL(4, 2): at most two integer digits.
  TupleMetadata inputSchema = new SchemaBuilder()
      .add("id", MinorType.INT)
      .add("dec", MinorType.VARCHAR)
      .buildSchema();
  TupleMetadata outputSchema = new SchemaBuilder()
      .add("id", MinorType.INT)
      .add("dec", MinorType.VARDECIMAL, 4, 2)
      .buildSchema();
  ConversionTestFixture conversionFixture =
      new ConversionTestFixture(fixture.allocator(), outputSchema);
  conversionFixture.createConvertersFor(inputSchema);
  try {
    // Seven integer digits cannot fit in DECIMAL(4, 2).
    conversionFixture.addRow(1, "1234567.89");
    fail();
  } catch (UserException expectedError) {
    // Expected
  }
  conversionFixture.build().clear();
}
/**
 * Asserts that a conversion analysis produced the expected conversion type.
 */
private static void expect(ConversionType expectedType, ConversionDefn actual) {
  assertEquals(expectedType, actual.type);
}
/**
 * Test the conversion type for a subset of type pairs, expressed as a
 * source-by-target expectation matrix rather than 64 individual calls.
 */
@Test
public void testBasicConversionType() {
  StandardConversions conversions = StandardConversions.builder().build();
  TupleMetadata schema = new SchemaBuilder()
      .add("ti", MinorType.TINYINT)
      .add("si", MinorType.SMALLINT)
      .add("int", MinorType.INT)
      .add("bi", MinorType.BIGINT)
      .add("fl", MinorType.FLOAT4)
      .add("db", MinorType.FLOAT8)
      .add("dec", MinorType.VARDECIMAL, 10, 0)
      .add("str", MinorType.VARCHAR)
      .buildSchema();
  // Columns in declaration order: ti, si, int, bi, fl, db, dec, str.
  ColumnMetadata[] cols = {
      schema.metadata("ti"), schema.metadata("si"), schema.metadata("int"),
      schema.metadata("bi"), schema.metadata("fl"), schema.metadata("db"),
      schema.metadata("dec"), schema.metadata("str")};
  // Short aliases keep the matrix readable.
  final ConversionType n = ConversionType.NONE;
  final ConversionType i = ConversionType.IMPLICIT;
  final ConversionType u = ConversionType.IMPLICIT_UNSAFE;
  final ConversionType e = ConversionType.EXPLICIT;
  // expected[from][to]: widening integer/float moves are implicit, narrowing
  // ones implicit-unsafe, and anything through decimal or varchar explicit.
  ConversionType[][] expected = {
      // to: ti si int bi fl db dec str
      {n, i, i, i, i, i, e, e}, // from ti
      {u, n, i, i, i, i, e, e}, // from si
      {u, u, n, i, i, i, e, e}, // from int
      {u, u, u, n, i, i, e, e}, // from bi
      {u, u, u, u, n, i, e, e}, // from fl
      {u, u, u, u, u, n, e, e}, // from db
      {e, e, e, e, e, e, n, e}, // from dec
      {e, e, e, e, e, e, e, n}, // from str
  };
  for (int from = 0; from < cols.length; from++) {
    for (int to = 0; to < cols.length; to++) {
      expect(expected[from][to], conversions.analyze(cols[from], cols[to]));
    }
  }
}
/**
* Test the specialized types: conversion to/from string.
*/
@Test
public void testSpecialConversionType() {
StandardConversions conversions = StandardConversions.builder().build();
TupleMetadata schema = new SchemaBuilder()
.add("time", MinorType.TIME)
.add("date", MinorType.DATE)
.add("ts", MinorType.TIMESTAMP)
.add("interval", MinorType.INTERVAL)
.add("year", MinorType.INTERVALYEAR)
.add("day", MinorType.INTERVALDAY)
.add("int", MinorType.INT)
.add("bi", MinorType.BIGINT)
.add("str", MinorType.VARCHAR)
.buildSchema();
ColumnMetadata timeCol = schema.metadata("time");
ColumnMetadata dateCol = schema.metadata("date");
ColumnMetadata tsCol = schema.metadata("ts");
ColumnMetadata intervalCol = schema.metadata("interval");
ColumnMetadata yearCol = schema.metadata("year");
ColumnMetadata dayCol = schema.metadata("day");
ColumnMetadata intCol = schema.metadata("int");
ColumnMetadata bigIntCol = schema.metadata("bi");
ColumnMetadata stringCol = schema.metadata("str");
// Date/time types convert explicitly to/from VARCHAR and implicitly
// to/from the matching integer width: INT for TIME, BIGINT for DATE
// and TIMESTAMP.
// TIME
expect(ConversionType.NONE, conversions.analyze(timeCol, timeCol));
expect(ConversionType.EXPLICIT, conversions.analyze(timeCol, stringCol));
expect(ConversionType.EXPLICIT, conversions.analyze(stringCol, timeCol));
expect(ConversionType.IMPLICIT, conversions.analyze(intCol, timeCol));
expect(ConversionType.IMPLICIT, conversions.analyze(timeCol, intCol));
// DATE
expect(ConversionType.NONE, conversions.analyze(dateCol, dateCol));
expect(ConversionType.EXPLICIT, conversions.analyze(dateCol, stringCol));
expect(ConversionType.EXPLICIT, conversions.analyze(stringCol, dateCol));
expect(ConversionType.IMPLICIT, conversions.analyze(bigIntCol, dateCol));
expect(ConversionType.IMPLICIT, conversions.analyze(dateCol, bigIntCol));
// TIMESTAMP
expect(ConversionType.NONE, conversions.analyze(tsCol, tsCol));
expect(ConversionType.EXPLICIT, conversions.analyze(tsCol, stringCol));
expect(ConversionType.EXPLICIT, conversions.analyze(stringCol, tsCol));
expect(ConversionType.IMPLICIT, conversions.analyze(bigIntCol, tsCol));
expect(ConversionType.IMPLICIT, conversions.analyze(tsCol, bigIntCol));
// Interval types are exercised only against themselves and VARCHAR.
// INTERVAL
expect(ConversionType.NONE, conversions.analyze(intervalCol, intervalCol));
expect(ConversionType.EXPLICIT, conversions.analyze(intervalCol, stringCol));
expect(ConversionType.EXPLICIT, conversions.analyze(stringCol, intervalCol));
// INTERVALYEAR
expect(ConversionType.NONE, conversions.analyze(yearCol, yearCol));
expect(ConversionType.EXPLICIT, conversions.analyze(yearCol, stringCol));
expect(ConversionType.EXPLICIT, conversions.analyze(stringCol, yearCol));
// INTERVALDAY
expect(ConversionType.NONE, conversions.analyze(dayCol, dayCol));
expect(ConversionType.EXPLICIT, conversions.analyze(dayCol, stringCol));
expect(ConversionType.EXPLICIT, conversions.analyze(stringCol, dayCol));
}
/**
 * Test the properties for how to handle blanks on string-to-number
 * conversions: the framework-level property, the column-level property,
 * and column-over-framework precedence, for both nullable and
 * non-nullable output columns.
 */
@Test
public void testBlankOptions() {
  // Nullable column with no blank-handling property: blanks are an error.
  try {
    doTestBlanks(DataMode.OPTIONAL, null, null, null);
    // Guard was missing here: without fail(), this case would pass
    // silently even if no exception were thrown.
    fail();
  } catch (InvalidConversionError e) {
    // Expected
  }
  doTestBlanks(DataMode.OPTIONAL, ColumnMetadata.BLANK_AS_NULL,
      null, null);
  doTestBlanks(DataMode.OPTIONAL, ColumnMetadata.BLANK_AS_ZERO,
      null, 0);
  // Non-nullable
  doTestBlanks(DataMode.REQUIRED, null, null, 20);
  doTestBlanks(DataMode.REQUIRED, ColumnMetadata.BLANK_AS_NULL,
      null, 20);
  doTestBlanks(DataMode.REQUIRED, ColumnMetadata.BLANK_AS_ZERO,
      null, 0);
  // Property on column
  doTestBlanks(DataMode.REQUIRED, null,
      ColumnMetadata.BLANK_AS_NULL, 20);
  doTestBlanks(DataMode.REQUIRED, null,
      ColumnMetadata.BLANK_AS_ZERO, 0);
  // Properties on both: column takes precedence
  doTestBlanks(DataMode.REQUIRED, ColumnMetadata.BLANK_AS_ZERO,
      ColumnMetadata.BLANK_AS_NULL, 20);
  doTestBlanks(DataMode.REQUIRED, ColumnMetadata.BLANK_AS_NULL,
      ColumnMetadata.BLANK_AS_ZERO, 0);
}
/**
* Runs one blank-handling scenario for string-to-INT conversion.
*
* @param mode data mode of the output INT column
* @param frameworkOption value for the {@code BLANK_AS_PROP} framework-level
* property passed to the conversion builder, or null to leave it unset
* @param colOption value for the {@code BLANK_AS_PROP} property on the input
* column, or null to leave it unset
* @param value expected output for the two blank inputs ("" and " ");
* null means a null value is expected (or an exception, which the
* caller checks)
*/
private void doTestBlanks(DataMode mode, String frameworkOption, String colOption, Integer value) {
TupleMetadata outputSchema = new SchemaBuilder()
.add("col", MinorType.INT, mode)
.buildSchema();
ColumnMetadata colSchema = outputSchema.metadata("col");
// Default used when a blank maps to "null" on a non-nullable column.
colSchema.setProperty(ColumnMetadata.DEFAULT_VALUE_PROP, "20");
TupleMetadata inputSchema = new SchemaBuilder()
.addNullable("col", MinorType.VARCHAR)
.buildSchema();
if (colOption != null) {
colSchema = inputSchema.metadata("col");
colSchema.setProperty(ColumnMetadata.BLANK_AS_PROP, colOption);
}
Map<String, String> props = null;
if (frameworkOption != null) {
props = new HashMap<>();
props.put(ColumnMetadata.BLANK_AS_PROP, frameworkOption);
}
ConversionTestFixture testFixture = new ConversionTestFixture(fixture.allocator(), outputSchema);
testFixture.withProperties(props);
testFixture.createConvertersFor(inputSchema);
// Release the vectors if a conversion throws, then rethrow for the caller.
try {
testFixture
.addSingleCol("")
.addSingleCol(" ")
.addSingleCol("10")
.addSingleCol(" 11 "); // surrounding whitespace is accepted: parses as 11
}
catch (Exception e) {
testFixture.build().clear();
throw e;
}
RowSet actual = testFixture.build();
final SingleRowSet expected = fixture.rowSetBuilder(outputSchema)
.addSingleCol(value)
.addSingleCol(value)
.addSingleCol(10)
.addSingleCol(11)
.build();
RowSetUtilities.verify(expected, actual);
}
}
|
openjdk/skara | 34,991 | bots/pr/src/test/java/org/openjdk/skara/bots/pr/IssueTests.java | /*
* Copyright (c) 2019, 2024, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package org.openjdk.skara.bots.pr;
import org.junit.jupiter.api.*;
import org.openjdk.skara.forge.*;
import org.openjdk.skara.issuetracker.*;
import org.openjdk.skara.json.JSON;
import org.openjdk.skara.test.*;
import org.openjdk.skara.vcs.Repository;
import java.io.IOException;
import java.util.*;
import java.util.regex.Pattern;
import static org.junit.jupiter.api.Assertions.*;
import static org.openjdk.skara.bots.pr.PullRequestAsserts.assertLastCommentContains;
import static org.openjdk.skara.issuetracker.jira.JiraProject.SUBCOMPONENT;
/**
 * Integration tests for the {@code /issue} pull request command (and its
 * {@code /solves} alias) handled by {@link PullRequestBot}. Each test builds a
 * bot over repositories and (optionally) an issue project obtained from
 * {@link HostCredentials}, drives it with {@link TestBotRunner#runPeriodicItems},
 * and asserts on the bot's reply comments, the PR body, and — where the PR is
 * integrated — the resulting commit message on {@code master}.
 */
class IssueTests {
    /**
     * End-to-end happy path using free-form issue ids (no issue project
     * configured): help output for a bare {@code /issue} and {@code /solves},
     * rejection of invalid syntax, add/remove/update of additional issues,
     * and finally verification that all listed issues appear in both the
     * commit message preview and the integrated commit message.
     */
    @Test
    void simple(TestInfo testInfo) throws IOException {
        try (var credentials = new HostCredentials(testInfo);
             var tempFolder = new TemporaryDirectory()) {
            var author = credentials.getHostedRepository();
            var integrator = credentials.getHostedRepository();
            var censusBuilder = credentials.getCensusBuilder()
                                           .addReviewer(integrator.forge().currentUser().id())
                                           .addCommitter(author.forge().currentUser().id());
            var prBot = PullRequestBot.newBuilder()
                                      .repo(integrator)
                                      .censusRepo(censusBuilder.build())
                                      .issuePRMap(new HashMap<>())
                                      .build();
            // Populate the projects repository
            var localRepoFolder = tempFolder.path().resolve("localrepo");
            var localRepo = CheckableRepository.init(localRepoFolder, author.repositoryType());
            var masterHash = localRepo.resolve("master").orElseThrow();
            assertFalse(CheckableRepository.hasBeenEdited(localRepo));
            localRepo.push(masterHash, author.authenticatedUrl(), "master", true);
            // Make a change with a corresponding PR
            var editHash = CheckableRepository.appendAndCommit(localRepo);
            localRepo.push(editHash, author.authenticatedUrl(), "edit", true);
            var pr = credentials.createPullRequest(author, "master", "edit", "123: This is a pull request");
            // No arguments
            pr.addComment("/issue");
            TestBotRunner.runPeriodicItems(prBot);
            // The bot should reply with a help message
            assertLastCommentContains(pr,"Command syntax:");
            assertLastCommentContains(pr, "`/issue");
            // Check that the /solves alias behaves the same way
            pr.addComment("/solves");
            TestBotRunner.runPeriodicItems(prBot);
            // The bot should reply with a help message
            assertLastCommentContains(pr,"Command syntax:");
            assertLastCommentContains(pr, "`/solves");
            // Invalid syntax
            pr.addComment("/issue something I guess");
            TestBotRunner.runPeriodicItems(prBot);
            // The bot should reply with a failure message
            assertLastCommentContains(pr,"Command syntax");
            // Add an issue
            pr.addComment("/issue 1234: An issue");
            TestBotRunner.runPeriodicItems(prBot);
            // The bot should reply with a success message
            assertLastCommentContains(pr,"Adding additional");
            // Try to remove a not-previously-added issue
            pr.addComment("/issue remove 1235");
            TestBotRunner.runPeriodicItems(prBot);
            // The bot should reply with a failure message
            assertLastCommentContains(pr,"was not found");
            // Now remove the added one
            pr.addComment("/issue remove 1234");
            TestBotRunner.runPeriodicItems(prBot);
            // The bot should reply with a success message
            assertLastCommentContains(pr,"Removing additional");
            // Add two more issues
            pr.addComment("/issue 12345: Another issue");
            pr.addComment("/issue 123456: Yet another issue");
            TestBotRunner.runPeriodicItems(prBot);
            // The bot should reply with a success message
            assertLastCommentContains(pr,"Adding additional");
            // Update the description of the first one
            pr.addComment("/issue 12345: This is indeed another issue");
            TestBotRunner.runPeriodicItems(prBot);
            // The bot should reply with a success message
            assertLastCommentContains(pr,"Updating description");
            // Approve it as another user
            var approvalPr = integrator.pullRequest(pr.id());
            approvalPr.addReview(Review.Verdict.APPROVED, "Approved");
            // NOTE(review): run twice — presumably the first pass reacts to the
            // review and the second completes follow-up work items; confirm.
            TestBotRunner.runPeriodicItems(prBot);
            TestBotRunner.runPeriodicItems(prBot);
            // The commit message preview should contain the additional issues
            var preview = pr.comments().stream()
                            .filter(comment -> comment.body().contains("the commit message for the final commit will be"))
                            .map(Comment::body)
                            .findFirst()
                            .orElseThrow();
            assertTrue(preview.contains("123: This is a pull request"));
            assertTrue(preview.contains("12345: This is indeed another issue"));
            assertTrue(preview.contains("123456: Yet another issue"));
            // Integrate
            pr.addComment("/integrate");
            TestBotRunner.runPeriodicItems(prBot);
            // The bot should reply with an ok message
            assertLastCommentContains(pr,"Pushed as commit");
            // The change should now be present on the master branch
            var pushedFolder = tempFolder.path().resolve("pushed");
            var pushedRepo = Repository.materialize(pushedFolder, author.authenticatedUrl(), "master");
            assertTrue(CheckableRepository.hasBeenEdited(pushedRepo));
            var headHash = pushedRepo.resolve("HEAD").orElseThrow();
            var headCommit = pushedRepo.commits(headHash.hex() + "^.." + headHash.hex()).asList().get(0);
            // The additional issues should be present in the commit message
            assertEquals(List.of("123: This is a pull request",
                                 "12345: This is indeed another issue",
                                 "123456: Yet another issue",
                                 "",
                                 "Reviewed-by: integrationreviewer1"), headCommit.message());
        }
    }
    /**
     * Same flow as {@link #simple(TestInfo)} but with a configured issue
     * project: exercises shorthand numeric ids, the {@code delete} alias for
     * {@code remove}, comma-separated lists, mixed-case project prefixes, and
     * checks that JBS title updates are reflected in the commit message
     * without triggering a PR/JBS title-mismatch warning for additional issues.
     */
    @Test
    void multiple(TestInfo testInfo) throws IOException {
        try (var credentials = new HostCredentials(testInfo);
             var tempFolder = new TemporaryDirectory()) {
            var author = credentials.getHostedRepository();
            var integrator = credentials.getHostedRepository();
            var issues = credentials.getIssueProject();
            var censusBuilder = credentials.getCensusBuilder()
                                           .addReviewer(integrator.forge().currentUser().id())
                                           .addCommitter(author.forge().currentUser().id());
            var prBot = PullRequestBot.newBuilder()
                                      .repo(integrator)
                                      .censusRepo(censusBuilder.build())
                                      .issueProject(issues)
                                      .issuePRMap(new HashMap<>())
                                      .build();
            // Populate the projects repository
            var localRepoFolder = tempFolder.path().resolve("localrepo");
            var localRepo = CheckableRepository.init(localRepoFolder, author.repositoryType());
            var masterHash = localRepo.resolve("master").orElseThrow();
            assertFalse(CheckableRepository.hasBeenEdited(localRepo));
            localRepo.push(masterHash, author.authenticatedUrl(), "master", true);
            // Make a change with a corresponding PR
            var editHash = CheckableRepository.appendAndCommit(localRepo);
            localRepo.push(editHash, author.authenticatedUrl(), "edit", true);
            // Issue ids have the form <PROJECT>-<number>; the PR title uses the bare number
            var issue1 = credentials.createIssue(issues, "Main");
            var issue1Number = Integer.parseInt(issue1.id().split("-")[1]);
            var pr = credentials.createPullRequest(author, "master", "edit", issue1Number + ": Main");
            var issue2 = credentials.createIssue(issues, "Second");
            var issue2Number = Integer.parseInt(issue2.id().split("-")[1]);
            var issue3 = credentials.createIssue(issues, "Third");
            var issue3Number = Integer.parseInt(issue3.id().split("-")[1]);
            // Add a single issue with the shorthand syntax
            pr.addComment("/solves " + issue3Number);
            TestBotRunner.runPeriodicItems(prBot);
            assertLastCommentContains(pr, "Adding additional issue to solves list");
            assertLastCommentContains(pr, ": Third");
            // And remove it ("delete" is accepted as an alias for "remove")
            pr.addComment("/solves delete " + issue3Number);
            TestBotRunner.runPeriodicItems(prBot);
            assertLastCommentContains(pr, "Removing additional issue from solves list: `" + issue3Number + "`");
            // Add two issues with the shorthand syntax
            pr.addComment("/issue " + issue2.id() + "," + issue3Number);
            TestBotRunner.runPeriodicItems(prBot);
            // The bot should add both
            assertLastCommentContains(pr, "Adding additional issue to issue list");
            assertLastCommentContains(pr, ": Second");
            assertLastCommentContains(pr, ": Third");
            // Update the title of issue2 and issue3
            issue2.setTitle("Second2");
            issue3.setTitle("Third3");
            pr.setBody("update this pr");
            TestBotRunner.runPeriodicItems(prBot);
            // PR body shouldn't contain title mismatch warning
            assertFalse(pr.store().body().contains("Title mismatch between PR and JBS for issue"));
            // Remove one
            pr.addComment("/issue remove " + issue2.id());
            TestBotRunner.runPeriodicItems(prBot);
            assertLastCommentContains(pr, "Removing additional issue from issue list: `" + issue2Number + "`");
            // Approve it as another user
            var approvalPr = integrator.pullRequest(pr.id());
            approvalPr.addReview(Review.Verdict.APPROVED, "Approved");
            TestBotRunner.runPeriodicItems(prBot);
            TestBotRunner.runPeriodicItems(prBot);
            // The commit message preview should contain the additional issues
            var preview = pr.comments().stream()
                            .filter(comment -> comment.body().contains("the commit message for the final commit will be"))
                            .map(Comment::body)
                            .findFirst()
                            .orElseThrow();
            // The preview must pick up the updated JBS title and drop the removed issue
            assertTrue(preview.contains(issue1Number + ": Main"));
            assertTrue(preview.contains(issue3Number + ": Third3"));
            assertFalse(preview.contains("Second"));
            // Integrate
            pr.addComment("/integrate");
            TestBotRunner.runPeriodicItems(prBot);
            // The bot should reply with an ok message
            assertLastCommentContains(pr,"Pushed as commit");
            // The change should now be present on the master branch
            var pushedFolder = tempFolder.path().resolve("pushed");
            var pushedRepo = Repository.materialize(pushedFolder, author.authenticatedUrl(), "master");
            assertTrue(CheckableRepository.hasBeenEdited(pushedRepo));
            var headHash = pushedRepo.resolve("HEAD").orElseThrow();
            var headCommit = pushedRepo.commits(headHash.hex() + "^.." + headHash.hex()).asList().get(0);
            // The additional issues should be present in the commit message
            assertEquals(List.of(issue1Number + ": Main",
                                 issue3Number + ": Third3",
                                 "",
                                 "Reviewed-by: integrationreviewer1"), headCommit.message());
        }
    }
    /**
     * Verifies that {@code /issue} issued by someone other than the PR author
     * is rejected exactly once with an "Only the author" error message.
     */
    @Test
    void invalidCommandAuthor(TestInfo testInfo) throws IOException {
        try (var credentials = new HostCredentials(testInfo);
             var tempFolder = new TemporaryDirectory()) {
            var author = credentials.getHostedRepository();
            var integrator = credentials.getHostedRepository();
            var external = credentials.getHostedRepository();
            var censusBuilder = credentials.getCensusBuilder()
                                           .addAuthor(author.forge().currentUser().id());
            var mergeBot = PullRequestBot.newBuilder()
                                         .repo(integrator)
                                         .censusRepo(censusBuilder.build())
                                         .issuePRMap(new HashMap<>())
                                         .build();
            // Populate the projects repository
            var localRepo = CheckableRepository.init(tempFolder.path(), author.repositoryType());
            var masterHash = localRepo.resolve("master").orElseThrow();
            assertFalse(CheckableRepository.hasBeenEdited(localRepo));
            localRepo.push(masterHash, author.authenticatedUrl(), "master", true);
            // Make a change with a corresponding PR
            var editHash = CheckableRepository.appendAndCommit(localRepo);
            localRepo.push(editHash, author.authenticatedUrl(), "edit", true);
            var pr = credentials.createPullRequest(author, "master", "edit", "This is a pull request");
            // Issue a solves command not as the PR author
            var externalPr = external.pullRequest(pr.id());
            externalPr.addComment("/issue 1234: an issue");
            TestBotRunner.runPeriodicItems(mergeBot);
            // The bot should reply with exactly one error message
            var error = pr.comments().stream()
                          .filter(comment -> comment.body().contains("Only the author"))
                          .count();
            assertEquals(1, error);
        }
    }
    /**
     * When the PR title has no issue reference, the first {@code /issue}
     * command becomes the title; a subsequent command with the same id
     * updates the title's description.
     */
    @Test
    void issueInTitle(TestInfo testInfo) throws IOException {
        try (var credentials = new HostCredentials(testInfo);
             var tempFolder = new TemporaryDirectory()) {
            var author = credentials.getHostedRepository();
            var integrator = credentials.getHostedRepository();
            var censusBuilder = credentials.getCensusBuilder()
                                           .addAuthor(author.forge().currentUser().id());
            var prBot = PullRequestBot.newBuilder()
                                      .repo(integrator)
                                      .censusRepo(censusBuilder.build())
                                      .issuePRMap(new HashMap<>())
                                      .build();
            // Populate the projects repository
            var localRepo = CheckableRepository.init(tempFolder.path(), author.repositoryType());
            var masterHash = localRepo.resolve("master").orElseThrow();
            assertFalse(CheckableRepository.hasBeenEdited(localRepo));
            localRepo.push(masterHash, author.authenticatedUrl(), "master", true);
            // Make a change with a corresponding PR
            var editHash = CheckableRepository.appendAndCommit(localRepo);
            localRepo.push(editHash, author.authenticatedUrl(), "edit", true);
            var pr = credentials.createPullRequest(author, "master", "edit", "This is a pull request");
            // Add an issue — with no issue in the title, the command sets the title
            pr.addComment("/issue 1234: An issue");
            TestBotRunner.runPeriodicItems(prBot);
            // The bot should reply with a success message
            assertLastCommentContains(pr,"current title");
            assertEquals("1234: An issue", pr.store().title());
            // Update the issue description
            pr.addComment("/issue 1234: Yes this is an issue");
            TestBotRunner.runPeriodicItems(prBot);
            // The bot should reply with a success message
            assertLastCommentContains(pr,"will now be updated");
            assertEquals("1234: Yes this is an issue", pr.store().title());
        }
    }
    /**
     * Checks the issue section of the PR body: a single issue renders under
     * an "## Issue" heading; adding a second via {@code /issue} switches the
     * heading to the plural "## Issues" and lists both.
     */
    @Test
    void issueInBody(TestInfo testInfo) throws IOException {
        try (var credentials = new HostCredentials(testInfo);
             var tempFolder = new TemporaryDirectory()) {
            var author = credentials.getHostedRepository();
            var integrator = credentials.getHostedRepository();
            var issues = credentials.getIssueProject();
            var censusBuilder = credentials.getCensusBuilder()
                                           .addAuthor(author.forge().currentUser().id());
            var prBot = PullRequestBot.newBuilder()
                                      .repo(integrator)
                                      .censusRepo(censusBuilder.build())
                                      .issueProject(issues)
                                      .issuePRMap(new HashMap<>())
                                      .build();
            // Populate the projects repository
            var localRepo = CheckableRepository.init(tempFolder.path(), author.repositoryType());
            var masterHash = localRepo.resolve("master").orElseThrow();
            assertFalse(CheckableRepository.hasBeenEdited(localRepo));
            localRepo.push(masterHash, author.authenticatedUrl(), "master", true);
            // Make a change with a corresponding PR
            var editHash = CheckableRepository.appendAndCommit(localRepo);
            localRepo.push(editHash, author.authenticatedUrl(), "edit", true);
            var issue1 = issues.createIssue("First", List.of("Hello"), Map.of());
            var pr = credentials.createPullRequest(author, "master", "edit",
                                                   issue1.id() + ": This is a pull request");
            // First check
            TestBotRunner.runPeriodicItems(prBot);
            assertTrue(pr.store().body().contains(issue1.id()));
            assertTrue(pr.store().body().contains("First"));
            assertTrue(pr.store().body().contains("## Issue\n"));
            // Add an extra issue
            var issue2 = issues.createIssue("Second", List.of("There"), Map.of());
            pr.addComment("/issue " + issue2.id() + ": Description");
            // Check that the body was updated to the plural heading
            TestBotRunner.runPeriodicItems(prBot);
            TestBotRunner.runPeriodicItems(prBot);
            assertTrue(pr.store().body().contains(issue1.id()));
            assertTrue(pr.store().body().contains("First"));
            assertTrue(pr.store().body().contains(issue2.id()));
            assertTrue(pr.store().body().contains("Second"));
            assertFalse(pr.store().body().contains("## Issue\n"));
            assertTrue(pr.store().body().contains("## Issues\n"));
        }
    }
    /**
     * A PR referencing an issue closed with a non-fix resolution ("Not an
     * Issue") should get an "Issue is not open" warning in its body.
     */
    @Test
    void closedIssue(TestInfo testInfo) throws IOException {
        try (var credentials = new HostCredentials(testInfo);
             var tempFolder = new TemporaryDirectory()) {
            var author = credentials.getHostedRepository();
            var integrator = credentials.getHostedRepository();
            var issues = credentials.getIssueProject();
            var censusBuilder = credentials.getCensusBuilder()
                                           .addAuthor(author.forge().currentUser().id());
            var prBot = PullRequestBot.newBuilder()
                                      .repo(integrator)
                                      .censusRepo(censusBuilder.build())
                                      .issueProject(issues)
                                      .issuePRMap(new HashMap<>())
                                      .build();
            // Populate the projects repository
            var localRepo = CheckableRepository.init(tempFolder.path(), author.repositoryType());
            var masterHash = localRepo.resolve("master").orElseThrow();
            assertFalse(CheckableRepository.hasBeenEdited(localRepo));
            localRepo.push(masterHash, author.authenticatedUrl(), "master", true);
            // Make a change with a corresponding PR
            var editHash = CheckableRepository.appendAndCommit(localRepo);
            localRepo.push(editHash, author.authenticatedUrl(), "edit", true);
            // Close the issue with a resolution that does not indicate a fix
            var issue1 = (TestIssueTrackerIssue) issues.createIssue("First", List.of("Hello"), Map.of());
            issue1.setState(Issue.State.CLOSED);
            issue1.store().properties().put("resolution", JSON.object().put("name", JSON.of("Not an Issue")));
            var pr = credentials.createPullRequest(author, "master", "edit",
                                                   issue1.id() + ": This is a pull request");
            // First check
            TestBotRunner.runPeriodicItems(prBot);
            assertTrue(pr.store().body().contains(issue1.id()));
            assertTrue(pr.store().body().contains("First"));
            assertTrue(pr.store().body().contains("## Issue\n"));
            assertTrue(pr.store().body().contains("Issue is not open"));
        }
    }
    /**
     * A PR referencing a RESOLVED issue (without the backport label) should be
     * nudged towards making it a "backport pull request".
     */
    @Test
    void resolvedIssue(TestInfo testInfo) throws IOException {
        try (var credentials = new HostCredentials(testInfo);
             var tempFolder = new TemporaryDirectory()) {
            var author = credentials.getHostedRepository();
            var integrator = credentials.getHostedRepository();
            var issues = credentials.getIssueProject();
            var censusBuilder = credentials.getCensusBuilder()
                                           .addAuthor(author.forge().currentUser().id());
            var prBot = PullRequestBot.newBuilder()
                                      .repo(integrator)
                                      .censusRepo(censusBuilder.build())
                                      .issueProject(issues)
                                      .issuePRMap(new HashMap<>())
                                      .build();
            // Populate the projects repository
            var localRepo = CheckableRepository.init(tempFolder.path(), author.repositoryType());
            var masterHash = localRepo.resolve("master").orElseThrow();
            assertFalse(CheckableRepository.hasBeenEdited(localRepo));
            localRepo.push(masterHash, author.authenticatedUrl(), "master", true);
            // Make a change with a corresponding PR
            var editHash = CheckableRepository.appendAndCommit(localRepo);
            localRepo.push(editHash, author.authenticatedUrl(), "edit", true);
            var issue1 = (TestIssueTrackerIssue) issues.createIssue("First", List.of("Hello"), Map.of());
            issue1.setState(Issue.State.RESOLVED);
            var pr = credentials.createPullRequest(author, "master", "edit",
                                                   issue1.id() + ": This is a pull request");
            // First check
            TestBotRunner.runPeriodicItems(prBot);
            assertTrue(pr.store().body().contains(issue1.id()));
            assertTrue(pr.store().body().contains("First"));
            assertTrue(pr.store().body().contains("## Issue\n"));
            assertTrue(pr.store().body().contains("Consider making this a \"backport pull request\" by setting"));
        }
    }
    /**
     * With the "backport" label set, a PR referencing a RESOLVED issue must
     * NOT be flagged with the "Issue is not open" warning.
     */
    @Test
    void closedIssueBackport(TestInfo testInfo) throws IOException {
        try (var credentials = new HostCredentials(testInfo);
             var tempFolder = new TemporaryDirectory()) {
            var author = credentials.getHostedRepository();
            var integrator = credentials.getHostedRepository();
            var issues = credentials.getIssueProject();
            var censusBuilder = credentials.getCensusBuilder()
                                           .addAuthor(author.forge().currentUser().id());
            var prBot = PullRequestBot.newBuilder()
                                      .repo(integrator)
                                      .censusRepo(censusBuilder.build())
                                      .issueProject(issues)
                                      .issuePRMap(new HashMap<>())
                                      .build();
            // Populate the projects repository
            var localRepo = CheckableRepository.init(tempFolder.path(), author.repositoryType());
            var masterHash = localRepo.resolve("master").orElseThrow();
            assertFalse(CheckableRepository.hasBeenEdited(localRepo));
            localRepo.push(masterHash, author.authenticatedUrl(), "master", true);
            // Make a change with a corresponding PR
            var editHash = CheckableRepository.appendAndCommit(localRepo);
            localRepo.push(editHash, author.authenticatedUrl(), "edit", true);
            var issue1 = issues.createIssue("First", List.of("Hello"), Map.of());
            issue1.setState(Issue.State.RESOLVED);
            var pr = credentials.createPullRequest(author, "master", "edit",
                                                   issue1.id() + ": This is a pull request");
            pr.addLabel("backport");
            // First check
            TestBotRunner.runPeriodicItems(prBot);
            assertTrue(pr.store().body().contains(issue1.id()));
            assertTrue(pr.store().body().contains("First"));
            assertTrue(pr.store().body().contains("## Issue\n"));
            assertFalse(pr.store().body().contains("Issue is not open"));
        }
    }
    // Matches the issue id out of a bot reply of the form "`<id>` was successfully created"
    private static final Pattern addedIssuePattern = Pattern.compile("`(.*)` was successfully created", Pattern.MULTILINE);
    // Extracts the issue announced in the PR's most recent bot comment and looks
    // it up in the given project; fails the test if the comment doesn't match.
    // NOTE(review): currently unreferenced within this class — confirm whether
    // it is still needed or is left over from a removed test.
    private static IssueTrackerIssue issueFromLastComment(PullRequest pr, IssueProject issueProject) {
        var comments = pr.comments();
        var lastComment = comments.getLast();
        var addedIssueMatcher = addedIssuePattern.matcher(lastComment.body());
        assertTrue(addedIssueMatcher.find(), lastComment.body());
        return issueProject.issue(addedIssueMatcher.group(1)).orElseThrow();
    }
    /**
     * Issue ids should be matched case-insensitively and with or without the
     * project prefix: lower-case, upper-case and bare-number forms of the same
     * id must all add/remove the same issue.
     */
    @Test
    void projectPrefix(TestInfo testInfo) throws IOException {
        try (var credentials = new HostCredentials(testInfo);
             var tempFolder = new TemporaryDirectory()) {
            var author = credentials.getHostedRepository();
            var integrator = credentials.getHostedRepository();
            var issueProject = credentials.getIssueProject();
            var censusBuilder = credentials.getCensusBuilder()
                                           .addAuthor(author.forge().currentUser().id());
            var prBot = PullRequestBot.newBuilder()
                                      .repo(integrator)
                                      .censusRepo(censusBuilder.build())
                                      .issueProject(issueProject)
                                      .issuePRMap(new HashMap<>())
                                      .build();
            // Populate the projects repository
            var localRepo = CheckableRepository.init(tempFolder.path(), author.repositoryType());
            var masterHash = localRepo.resolve("master").orElseThrow();
            assertFalse(CheckableRepository.hasBeenEdited(localRepo));
            localRepo.push(masterHash, author.authenticatedUrl(), "master", true);
            // Create issues
            var issue1 = credentials.createIssue(issueProject, "Issue 1");
            var issue2 = credentials.createIssue(issueProject, "Issue 2");
            // Make a change with a corresponding PR
            var editHash = CheckableRepository.appendAndCommit(localRepo);
            localRepo.push(editHash, author.authenticatedUrl(), "edit", true);
            var pr = credentials.createPullRequest(author, "master", "edit", issue1.id() + ": This is a pull request");
            TestBotRunner.runPeriodicItems(prBot);
            // Lower-case id variant
            pr.addComment("/issue add " + issue2.id().toLowerCase());
            TestBotRunner.runPeriodicItems(prBot);
            // The bot should reply with a success message
            assertLastCommentContains(pr,"Adding additional issue to issue list");
            pr.addComment("/issue remove " + issue2.id().toLowerCase());
            TestBotRunner.runPeriodicItems(prBot);
            // The bot should reply with a success message
            assertLastCommentContains(pr,"Removing additional issue from issue list");
            // Upper-case id variant
            pr.addComment("/issue add " + issue2.id().toUpperCase());
            TestBotRunner.runPeriodicItems(prBot);
            // The bot should reply with a success message
            assertLastCommentContains(pr,"Adding additional issue to issue list");
            pr.addComment("/issue remove " + issue2.id().toUpperCase());
            TestBotRunner.runPeriodicItems(prBot);
            // The bot should reply with a success message
            assertLastCommentContains(pr,"Removing additional issue from issue list");
            // Bare number without the project prefix
            pr.addComment("/issue add " + issue2.id().split("-")[1]);
            TestBotRunner.runPeriodicItems(prBot);
            // The bot should reply with a success message
            assertLastCommentContains(pr,"Adding additional issue to issue list");
        }
    }
    /**
     * Multiple {@code /issue add} commands placed in the initial PR body: the
     * first referenced issue becomes the title, the rest are added as
     * additional issues, each acknowledged in its own bot comment.
     */
    @Test
    void multipleIssuesInBody(TestInfo testInfo) throws IOException {
        try (var credentials = new HostCredentials(testInfo);
             var tempFolder = new TemporaryDirectory()) {
            var author = credentials.getHostedRepository();
            var integrator = credentials.getHostedRepository();
            var issueProject = credentials.getIssueProject();
            var censusBuilder = credentials.getCensusBuilder()
                                           .addAuthor(author.forge().currentUser().id());
            var prBot = PullRequestBot.newBuilder()
                                      .repo(integrator)
                                      .censusRepo(censusBuilder.build())
                                      .issueProject(issueProject)
                                      .issuePRMap(new HashMap<>())
                                      .build();
            // Populate the projects repository
            var localRepo = CheckableRepository.init(tempFolder.path(), author.repositoryType());
            var masterHash = localRepo.resolve("master").orElseThrow();
            assertFalse(CheckableRepository.hasBeenEdited(localRepo));
            localRepo.push(masterHash, author.authenticatedUrl(), "master", true);
            var issue1 = credentials.createIssue(issueProject, "Issue 1");
            var issue2 = credentials.createIssue(issueProject, "Issue 2");
            var issue3 = credentials.createIssue(issueProject, "Issue 3");
            // Make a change with a corresponding PR
            var editHash = CheckableRepository.appendAndCommit(localRepo);
            localRepo.push(editHash, author.authenticatedUrl(), "edit", true);
            var pr = credentials.createPullRequest(author, "master", "edit", "Pull request title",
                                                   List.of("/issue add " + issue1.id(),
                                                           "/issue add " + issue2.id(),
                                                           "/issue add " + issue3.id()));
            TestBotRunner.runPeriodicItems(prBot);
            // The first issue should be the title
            // NOTE(review): other tests assert on pr.store().title(); confirm that
            // pr.title() observes the same state here.
            assertTrue(pr.title().startsWith(issue1.id().split("-")[1] + ": "));
            var comments = pr.comments();
            assertEquals(4, comments.size());
            assertTrue(comments.get(1).body().contains("current title does not contain an issue reference"));
            assertTrue(comments.get(2).body().contains("Adding additional issue to"));
            assertTrue(comments.get(3).body().contains("Adding additional issue to"));
        }
    }
    /**
     * When the title references an issue id that cannot be resolved in the
     * issue project, no commit-message preview is posted and the PR body
     * carries an integration blocker naming the missing issue.
     */
    @Test
    void issueMissing(TestInfo testInfo) throws IOException {
        try (var credentials = new HostCredentials(testInfo);
             var tempFolder = new TemporaryDirectory()) {
            var author = credentials.getHostedRepository();
            var integrator = credentials.getHostedRepository();
            var issues = credentials.getIssueProject();
            var censusBuilder = credentials.getCensusBuilder()
                                           .addReviewer(integrator.forge().currentUser().id())
                                           .addCommitter(author.forge().currentUser().id());
            var prBot = PullRequestBot.newBuilder()
                                      .repo(integrator)
                                      .censusRepo(censusBuilder.build())
                                      .issueProject(issues)
                                      .issuePRMap(new HashMap<>())
                                      .build();
            // Populate the projects repository
            var localRepoFolder = tempFolder.path().resolve("localrepo");
            var localRepo = CheckableRepository.init(localRepoFolder, author.repositoryType());
            var masterHash = localRepo.resolve("master").orElseThrow();
            assertFalse(CheckableRepository.hasBeenEdited(localRepo));
            localRepo.push(masterHash, author.authenticatedUrl(), "master", true);
            // Make a change with a corresponding PR with a non-existing issue ID
            var editHash = CheckableRepository.appendAndCommit(localRepo);
            localRepo.push(editHash, author.authenticatedUrl(), "edit", true);
            var pr = credentials.createPullRequest(author, "master", "edit", "123: This is a PR");
            // Approve it as another user
            var approvalPr = integrator.pullRequest(pr.id());
            approvalPr.addReview(Review.Verdict.APPROVED, "Approved");
            TestBotRunner.runPeriodicItems(prBot);
            TestBotRunner.runPeriodicItems(prBot);
            // There should be no commit preview message
            var previewComment = pr.comments().stream()
                                   .map(Comment::body)
                                   .filter(body -> body.contains("the commit message for the final commit will be"))
                                   .findFirst();
            assertEquals(Optional.empty(), previewComment, "Preview comment should not have been posted");
            // Body should contain integration blocker
            assertTrue(pr.store().body().contains("Integration blocker"), "Body does not report integration blocker");
            assertTrue(pr.store().body().contains("Failed to retrieve information on issue `123`"),
                       "Body does not contain specific message");
        }
    }
}
|
apache/ignite-3 | 35,260 | modules/table/src/testFixtures/java/org/apache/ignite/internal/table/impl/DummyInternalTableImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.table.impl;
import static java.util.concurrent.CompletableFuture.completedFuture;
import static org.apache.ignite.internal.lang.IgniteSystemProperties.colocationEnabled;
import static org.apache.ignite.internal.replicator.ReplicatorConstants.DEFAULT_IDLE_SAFE_TIME_PROPAGATION_PERIOD_MILLISECONDS;
import static org.apache.ignite.internal.testframework.IgniteTestUtils.deriveUuidFrom;
import static org.apache.ignite.internal.testframework.matchers.CompletableFutureMatcher.willCompleteSuccessfully;
import static org.apache.ignite.internal.util.CompletableFutures.nullCompletedFuture;
import static org.apache.ignite.lang.ErrorGroups.Common.INTERNAL_ERR;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.lenient;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.io.Serializable;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.ignite.distributed.TestPartitionDataStorage;
import org.apache.ignite.internal.TestHybridClock;
import org.apache.ignite.internal.catalog.Catalog;
import org.apache.ignite.internal.catalog.CatalogService;
import org.apache.ignite.internal.catalog.descriptors.CatalogIndexDescriptor;
import org.apache.ignite.internal.catalog.descriptors.CatalogTableDescriptor;
import org.apache.ignite.internal.components.SystemPropertiesNodeProperties;
import org.apache.ignite.internal.configuration.SystemDistributedConfiguration;
import org.apache.ignite.internal.failure.FailureProcessor;
import org.apache.ignite.internal.hlc.ClockService;
import org.apache.ignite.internal.hlc.HybridClock;
import org.apache.ignite.internal.hlc.HybridClockImpl;
import org.apache.ignite.internal.hlc.HybridTimestamp;
import org.apache.ignite.internal.hlc.HybridTimestampTracker;
import org.apache.ignite.internal.hlc.TestClockService;
import org.apache.ignite.internal.logger.IgniteLogger;
import org.apache.ignite.internal.logger.Loggers;
import org.apache.ignite.internal.lowwatermark.TestLowWatermark;
import org.apache.ignite.internal.manager.ComponentContext;
import org.apache.ignite.internal.metrics.NoOpMetricManager;
import org.apache.ignite.internal.network.AbstractMessagingService;
import org.apache.ignite.internal.network.ChannelType;
import org.apache.ignite.internal.network.ClusterNodeImpl;
import org.apache.ignite.internal.network.ClusterNodeResolver;
import org.apache.ignite.internal.network.ClusterService;
import org.apache.ignite.internal.network.InternalClusterNode;
import org.apache.ignite.internal.network.NetworkMessage;
import org.apache.ignite.internal.network.SingleClusterNodeResolver;
import org.apache.ignite.internal.network.TopologyService;
import org.apache.ignite.internal.network.serialization.MessageSerializer;
import org.apache.ignite.internal.partition.replicator.ZonePartitionReplicaListener;
import org.apache.ignite.internal.partition.replicator.raft.ZonePartitionRaftListener;
import org.apache.ignite.internal.partition.replicator.raft.snapshot.PartitionDataStorage;
import org.apache.ignite.internal.partition.replicator.raft.snapshot.PartitionKey;
import org.apache.ignite.internal.partition.replicator.raft.snapshot.outgoing.PartitionSnapshots;
import org.apache.ignite.internal.partition.replicator.raft.snapshot.outgoing.PartitionsSnapshots;
import org.apache.ignite.internal.placementdriver.PlacementDriver;
import org.apache.ignite.internal.placementdriver.ReplicaMeta;
import org.apache.ignite.internal.placementdriver.TestPlacementDriver;
import org.apache.ignite.internal.raft.Command;
import org.apache.ignite.internal.raft.Peer;
import org.apache.ignite.internal.raft.RaftGroupConfiguration;
import org.apache.ignite.internal.raft.WriteCommand;
import org.apache.ignite.internal.raft.service.CommandClosure;
import org.apache.ignite.internal.raft.service.LeaderWithTerm;
import org.apache.ignite.internal.raft.service.RaftGroupListener;
import org.apache.ignite.internal.raft.service.RaftGroupService;
import org.apache.ignite.internal.replicator.ReplicaResult;
import org.apache.ignite.internal.replicator.ReplicaService;
import org.apache.ignite.internal.replicator.ReplicationGroupId;
import org.apache.ignite.internal.replicator.TablePartitionId;
import org.apache.ignite.internal.replicator.ZonePartitionId;
import org.apache.ignite.internal.replicator.command.SafeTimePropagatingCommand;
import org.apache.ignite.internal.replicator.configuration.ReplicationConfiguration;
import org.apache.ignite.internal.replicator.listener.ReplicaListener;
import org.apache.ignite.internal.replicator.message.PrimaryReplicaChangeCommand;
import org.apache.ignite.internal.replicator.message.ReplicaMessagesFactory;
import org.apache.ignite.internal.replicator.message.TimestampAwareReplicaResponse;
import org.apache.ignite.internal.schema.AlwaysSyncedSchemaSyncService;
import org.apache.ignite.internal.schema.BinaryRow;
import org.apache.ignite.internal.schema.BinaryRowConverter;
import org.apache.ignite.internal.schema.BinaryRowEx;
import org.apache.ignite.internal.schema.ColumnsExtractor;
import org.apache.ignite.internal.schema.SchemaDescriptor;
import org.apache.ignite.internal.sql.SqlCommon;
import org.apache.ignite.internal.storage.MvPartitionStorage;
import org.apache.ignite.internal.storage.engine.MvTableStorage;
import org.apache.ignite.internal.storage.impl.TestMvPartitionStorage;
import org.apache.ignite.internal.storage.index.StorageHashIndexDescriptor;
import org.apache.ignite.internal.storage.index.StorageHashIndexDescriptor.StorageHashIndexColumnDescriptor;
import org.apache.ignite.internal.storage.index.impl.TestHashIndexStorage;
import org.apache.ignite.internal.table.StreamerReceiverRunner;
import org.apache.ignite.internal.table.TableTestUtils;
import org.apache.ignite.internal.table.distributed.HashIndexLocker;
import org.apache.ignite.internal.table.distributed.IndexLocker;
import org.apache.ignite.internal.table.distributed.StorageUpdateHandler;
import org.apache.ignite.internal.table.distributed.TableIndexStoragesSupplier;
import org.apache.ignite.internal.table.distributed.TableSchemaAwareIndexStorage;
import org.apache.ignite.internal.table.distributed.index.IndexMetaStorage;
import org.apache.ignite.internal.table.distributed.index.IndexUpdateHandler;
import org.apache.ignite.internal.table.distributed.raft.MinimumRequiredTimeCollectorService;
import org.apache.ignite.internal.table.distributed.raft.PartitionListener;
import org.apache.ignite.internal.table.distributed.replicator.PartitionReplicaListener;
import org.apache.ignite.internal.table.distributed.replicator.TransactionStateResolver;
import org.apache.ignite.internal.table.distributed.storage.InternalTableImpl;
import org.apache.ignite.internal.table.metrics.TableMetricSource;
import org.apache.ignite.internal.thread.IgniteThreadFactory;
import org.apache.ignite.internal.tx.InternalTransaction;
import org.apache.ignite.internal.tx.TxManager;
import org.apache.ignite.internal.tx.configuration.TransactionConfiguration;
import org.apache.ignite.internal.tx.impl.HeapLockManager;
import org.apache.ignite.internal.tx.impl.RemotelyTriggeredResourceRegistry;
import org.apache.ignite.internal.tx.impl.TransactionIdGenerator;
import org.apache.ignite.internal.tx.impl.TransactionInflights;
import org.apache.ignite.internal.tx.impl.TxManagerImpl;
import org.apache.ignite.internal.tx.storage.state.test.TestTxStateStorage;
import org.apache.ignite.internal.tx.test.TestLocalRwTxCounter;
import org.apache.ignite.internal.util.Lazy;
import org.apache.ignite.internal.util.PendingComparableValuesTracker;
import org.apache.ignite.internal.util.SafeTimeValuesTracker;
import org.apache.ignite.network.NetworkAddress;
import org.apache.ignite.table.QualifiedName;
import org.apache.ignite.table.QualifiedNameHelper;
import org.apache.ignite.tx.TransactionException;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
/**
* Dummy table storage implementation.
*/
@TestOnly
public class DummyInternalTableImpl extends InternalTableImpl {
    /** Logger for this dummy table implementation. */
    public static final IgniteLogger LOG = Loggers.forClass(DummyInternalTableImpl.class);

    /** Network address of the single local test node. */
    public static final NetworkAddress ADDR = new NetworkAddress("127.0.0.1", 2004);

    /** The only cluster node participating in these tests. */
    public static final InternalClusterNode LOCAL_NODE = new ClusterNodeImpl(new UUID(1, 2), "node", ADDR);

    // 2000 was picked to avoid negative time that we get when building read timestamp
    // in TxManagerImpl.currentReadTimestamp.
    // We subtract (ReplicaManager.IDLE_SAFE_TIME_PROPAGATION_PERIOD_MILLISECONDS + HybridTimestamp.CLOCK_SKEW) = (1000 + 7) = 1007
    // from the current time.
    // Any value greater than that will work, hence 2000.
    public static final HybridClock CLOCK = new TestHybridClock(() -> 2000);

    /** Clock service wrapping the fixed test clock above. */
    private static final ClockService CLOCK_SERVICE = new TestClockService(CLOCK);

    /** ID of the zone to which the corresponding table belongs. */
    public static final int ZONE_ID = 2;

    /** Index of the single partition backing this dummy table. */
    private static final int PART_ID = 0;

    /** Shared replication group id used when the table is created for cross-table tests. */
    private static final ReplicationGroupId crossTableGroupId = new TablePartitionId(333, 0);

    /** Factory for replica messages (used to build the primary-replica-change command). */
    private static final ReplicaMessagesFactory REPLICA_MESSAGES_FACTORY = new ReplicaMessagesFactory();

    /** Raft listener that applies write commands to the partition state machine. */
    private RaftGroupListener partitionListener;

    /** Listener that processes replica requests addressed to this table. */
    private ReplicaListener replicaListener;

    /** Replication group id of this table's single partition. */
    private final ReplicationGroupId groupId;

    /** The thread updates safe time on the dummy replica. */
    private final SafeTimeValuesTracker safeTime;

    /** Serializes command submissions to the mocked raft service. */
    private final Object raftServiceMutex = new Object();

    /** Generator of unique table ids, one per created dummy table. */
    private static final AtomicInteger nextTableId = new AtomicInteger(10_001);

    /** Scheduler shared by every transaction manager created by this class. */
    private static final ScheduledExecutorService COMMON_SCHEDULER = Executors.newSingleThreadScheduledExecutor(
            IgniteThreadFactory.create("node", "DummyInternalTable-common-scheduler-", true, LOG)
    );

    /** Whether zone-based colocation is enabled; selects zone vs per-table listeners. */
    private final boolean enabledColocation = colocationEnabled();
    /**
     * Creates a new local table backed by a fresh {@link TestMvPartitionStorage} and the default test placement driver.
     *
     * @param replicaSvc Replica service.
     * @param schema Schema.
     * @param txConfiguration Transaction configuration.
     * @param systemCfg System configuration.
     * @param replicationConfiguration Replication configuration.
     */
    public DummyInternalTableImpl(
            ReplicaService replicaSvc,
            SchemaDescriptor schema,
            TransactionConfiguration txConfiguration,
            SystemDistributedConfiguration systemCfg,
            ReplicationConfiguration replicationConfiguration
    ) {
        // Delegate to the storage-aware constructor with test defaults.
        this(
                replicaSvc,
                new TestPlacementDriver(LOCAL_NODE),
                new TestMvPartitionStorage(0),
                schema,
                txConfiguration,
                systemCfg,
                replicationConfiguration
        );
    }
    /**
     * Creates a new local table with an explicit storage and placement driver; replica-service calls are mocked
     * (no cross-table usage) and a fresh resource registry and inflights tracker are created.
     *
     * @param replicaSvc Replica service.
     * @param placementDriver Placement driver.
     * @param storage Storage.
     * @param schema Schema.
     * @param txConfiguration Transaction configuration.
     * @param systemCfg System configuration.
     * @param replicationConfiguration Replication Configuration.
     */
    public DummyInternalTableImpl(
            ReplicaService replicaSvc,
            PlacementDriver placementDriver,
            MvPartitionStorage storage,
            SchemaDescriptor schema,
            TransactionConfiguration txConfiguration,
            SystemDistributedConfiguration systemCfg,
            ReplicationConfiguration replicationConfiguration
    ) {
        this(
                replicaSvc,
                storage,
                false, // not a cross-table test: replica service calls are delegated to the local listener.
                null, // no transaction state resolver.
                schema,
                HybridTimestampTracker.atomicTracker(null),
                placementDriver,
                replicationConfiguration,
                txConfiguration,
                systemCfg,
                new RemotelyTriggeredResourceRegistry(),
                new TransactionInflights(placementDriver, CLOCK_SERVICE)
        );
    }
    /**
     * Creates a new local table.
     *
     * <p>Wires together a mocked raft service, a partition replica listener and a partition raft listener so that
     * replica requests issued through the table are executed synchronously against the given partition storage.
     *
     * @param replicaSvc Replica service.
     * @param mvPartStorage Multi version partition storage.
     * @param crossTableUsage If this dummy table is going to be used in cross-table tests, it won't mock the calls of
     *         ReplicaService by itself.
     * @param transactionStateResolver Transaction state resolver.
     * @param schema Schema descriptor.
     * @param tracker Observable timestamp tracker.
     * @param placementDriver Placement driver.
     * @param replicationConfiguration Replication configuration.
     * @param txConfiguration Transaction configuration.
     * @param systemCfg System configuration.
     * @param resourcesRegistry Resource registry.
     * @param transactionInflights Inflights.
     */
    public DummyInternalTableImpl(
            ReplicaService replicaSvc,
            MvPartitionStorage mvPartStorage,
            boolean crossTableUsage,
            @Nullable TransactionStateResolver transactionStateResolver,
            SchemaDescriptor schema,
            HybridTimestampTracker tracker,
            PlacementDriver placementDriver,
            ReplicationConfiguration replicationConfiguration,
            TransactionConfiguration txConfiguration,
            SystemDistributedConfiguration systemCfg,
            RemotelyTriggeredResourceRegistry resourcesRegistry,
            TransactionInflights transactionInflights
    ) {
        super(
                QualifiedNameHelper.fromNormalized(SqlCommon.DEFAULT_SCHEMA_NAME, "test"),
                ZONE_ID, // zone id.
                nextTableId.getAndIncrement(), // table id.
                1, // number of partitions.
                new SingleClusterNodeResolver(LOCAL_NODE),
                txManager(replicaSvc, placementDriver, txConfiguration, systemCfg, resourcesRegistry),
                mock(MvTableStorage.class),
                new TestTxStateStorage(),
                replicaSvc,
                CLOCK_SERVICE,
                tracker,
                placementDriver,
                transactionInflights,
                null,
                mock(StreamerReceiverRunner.class),
                () -> 10_000L,
                () -> 10_000L,
                colocationEnabled(),
                new TableMetricSource(QualifiedName.fromSimple("test"))
        );

        // Mocked raft client: commands submitted through it are applied synchronously below.
        RaftGroupService svc = mock(RaftGroupService.class);

        // Cross-table tests need a group id unique to this table; otherwise the shared id is used.
        groupId = crossTableUsage ? new TablePartitionId(tableId(), PART_ID) : crossTableGroupId;

        lenient().doReturn(groupId).when(svc).groupId();
        Peer leaderPeer = new Peer(UUID.randomUUID().toString());
        lenient().doReturn(leaderPeer).when(svc).leader();
        lenient().doReturn(completedFuture(new LeaderWithTerm(leaderPeer, 1L))).when(svc).refreshAndGetLeaderWithTerm();

        if (!crossTableUsage) {
            // Delegate replica requests directly to replica listener.
            lenient()
                    .doAnswer(invocationOnMock -> {
                        InternalClusterNode node = invocationOnMock.getArgument(0);

                        return replicaListener.invoke(invocationOnMock.getArgument(1), node.id()).thenApply(ReplicaResult::result);
                    })
                    .when(replicaSvc).invoke(any(InternalClusterNode.class), any());

            lenient()
                    .doAnswer(invocationOnMock -> {
                        String nodeConsistenId = invocationOnMock.getArgument(0);
                        UUID nodeId = deriveUuidFrom(nodeConsistenId);

                        return replicaListener.invoke(invocationOnMock.getArgument(1), nodeId).thenApply(ReplicaResult::result);
                    })
                    .when(replicaSvc).invoke(anyString(), any());

            // Raw invocations additionally wrap the result into a timestamp-aware response.
            lenient()
                    .doAnswer(invocationOnMock -> {
                        InternalClusterNode node = invocationOnMock.getArgument(0);

                        return replicaListener.invoke(invocationOnMock.getArgument(1), node.id())
                                .thenApply(DummyInternalTableImpl::dummyTimestampAwareResponse);
                    })
                    .when(replicaSvc).invokeRaw(any(InternalClusterNode.class), any());

            lenient()
                    .doAnswer(invocationOnMock -> {
                        String nodeConsistenId = invocationOnMock.getArgument(0);
                        UUID nodeId = deriveUuidFrom(nodeConsistenId);

                        return replicaListener.invoke(invocationOnMock.getArgument(1), nodeId)
                                .thenApply(DummyInternalTableImpl::dummyTimestampAwareResponse);
                    })
                    .when(replicaSvc).invokeRaw(anyString(), any());
        }

        // Monotonically growing raft log index handed to each applied command.
        AtomicLong raftIndex = new AtomicLong(1);

        // Delegate directly to listener.
        lenient().doAnswer(
                invocationClose -> {
                    synchronized (raftServiceMutex) {
                        Command cmd = invocationClose.getArgument(0);

                        long commandIndex = raftIndex.incrementAndGet();

                        // Safe time is only propagated for safe-time propagating commands.
                        HybridTimestamp safeTs = cmd instanceof SafeTimePropagatingCommand ? CLOCK.now() : null;

                        CompletableFuture<Serializable> res = new CompletableFuture<>();

                        // All read commands are handled directly through partition replica listener.
                        CommandClosure<WriteCommand> clo = new CommandClosure<>() {
                            /** {@inheritDoc} */
                            @Override
                            public long index() {
                                return commandIndex;
                            }

                            /** {@inheritDoc} */
                            @Override
                            public HybridTimestamp safeTimestamp() {
                                return safeTs;
                            }

                            /** {@inheritDoc} */
                            @Override
                            public @Nullable WriteCommand command() {
                                return (WriteCommand) cmd;
                            }

                            /** {@inheritDoc} */
                            @Override
                            public void result(@Nullable Serializable r) {
                                if (r instanceof Throwable) {
                                    res.completeExceptionally((Throwable) r);
                                } else {
                                    res.complete(r);
                                }
                            }
                        };

                        try {
                            partitionListener.onWrite(List.of(clo).iterator());
                        } catch (Throwable e) {
                            res.completeExceptionally(new TransactionException(INTERNAL_ERR, e));
                        }

                        return res;
                    }
                }
        ).when(svc).run(any());

        int tableId = tableId();
        int indexId = 1;

        // Primary-key index storage built from the key columns of the schema.
        ColumnsExtractor row2Tuple = BinaryRowConverter.keyExtractor(schema);

        StorageHashIndexDescriptor pkIndexDescriptor = mock(StorageHashIndexDescriptor.class);

        when(pkIndexDescriptor.columns()).then(
                invocation -> Collections.nCopies(schema.keyColumns().size(), mock(StorageHashIndexColumnDescriptor.class))
        );

        Lazy<TableSchemaAwareIndexStorage> pkStorage = new Lazy<>(() -> new TableSchemaAwareIndexStorage(
                indexId,
                new TestHashIndexStorage(PART_ID, pkIndexDescriptor),
                row2Tuple
        ));

        IndexLocker pkLocker = new HashIndexLocker(indexId, true, this.txManager.lockManager(), row2Tuple);

        safeTime = new SafeTimeValuesTracker(HybridTimestamp.MIN_VALUE);

        PartitionDataStorage partitionDataStorage = new TestPartitionDataStorage(tableId, PART_ID, mvPartStorage);

        TableIndexStoragesSupplier indexes = createTableIndexStoragesSupplier(Map.of(pkStorage.get().id(), pkStorage.get()));

        IndexUpdateHandler indexUpdateHandler = new IndexUpdateHandler(indexes);

        StorageUpdateHandler storageUpdateHandler = new StorageUpdateHandler(
                PART_ID,
                partitionDataStorage,
                indexUpdateHandler,
                replicationConfiguration,
                TableTestUtils.NOOP_PARTITION_MODIFICATION_COUNTER
        );

        DummySchemaManagerImpl schemaManager = new DummySchemaManagerImpl(schema);

        // Catalog mocks: a single table descriptor (schema version 1) and the PK index.
        Catalog catalog = mock(Catalog.class);
        CatalogService catalogService = mock(CatalogService.class);
        CatalogTableDescriptor tableDescriptor = mock(CatalogTableDescriptor.class);

        lenient().when(catalogService.catalog(anyInt())).thenReturn(catalog);
        lenient().when(catalogService.activeCatalog(anyLong())).thenReturn(catalog);
        lenient().when(catalog.table(anyInt())).thenReturn(tableDescriptor);
        lenient().when(tableDescriptor.latestSchemaVersion()).thenReturn(1);

        CatalogIndexDescriptor indexDescriptor = mock(CatalogIndexDescriptor.class);

        lenient().when(indexDescriptor.id()).thenReturn(pkStorage.get().id());

        lenient().when(catalog.indexes(anyInt())).thenReturn(List.of(indexDescriptor));

        ZonePartitionId zonePartitionId = new ZonePartitionId(ZONE_ID, PART_ID);
        TablePartitionId tablePartitionId = new TablePartitionId(tableId, PART_ID);

        var tableReplicaListener = new PartitionReplicaListener(
                mvPartStorage,
                svc,
                this.txManager,
                this.txManager.lockManager(),
                Runnable::run,
                tablePartitionId,
                tableId,
                () -> Map.of(pkLocker.id(), pkLocker),
                pkStorage,
                Map::of,
                CLOCK_SERVICE,
                safeTime,
                txStateStorage().getOrCreatePartitionStorage(PART_ID),
                transactionStateResolver,
                storageUpdateHandler,
                new DummyValidationSchemasSource(schemaManager),
                LOCAL_NODE,
                new AlwaysSyncedSchemaSyncService(),
                catalogService,
                placementDriver,
                mock(ClusterNodeResolver.class),
                resourcesRegistry,
                schemaManager,
                mock(IndexMetaStorage.class),
                new TestLowWatermark(),
                mock(FailureProcessor.class),
                new SystemPropertiesNodeProperties(),
                new TableMetricSource(QualifiedName.fromSimple("dummy_table"))
        );

        // With colocation the table listener is registered inside a zone-wide listener.
        if (enabledColocation) {
            ZonePartitionReplicaListener zoneReplicaListener = new ZonePartitionReplicaListener(
                    txStateStorage().getOrCreatePartitionStorage(PART_ID),
                    CLOCK_SERVICE,
                    this.txManager,
                    new DummyValidationSchemasSource(schemaManager),
                    new AlwaysSyncedSchemaSyncService(),
                    catalogService,
                    placementDriver,
                    mock(ClusterNodeResolver.class),
                    svc,
                    mock(FailureProcessor.class),
                    new SystemPropertiesNodeProperties(),
                    LOCAL_NODE,
                    zonePartitionId
            );

            zoneReplicaListener.addTableReplicaProcessor(tableId, raftClient -> tableReplicaListener);

            replicaListener = zoneReplicaListener;
        } else {
            replicaListener = tableReplicaListener;
        }

        HybridClock clock = new HybridClockImpl();
        ClockService clockService = mock(ClockService.class);
        lenient().when(clockService.current()).thenReturn(clock.current());

        PendingComparableValuesTracker<Long, Void> storageIndexTracker = new PendingComparableValuesTracker<>(0L);

        var tablePartitionListener = new PartitionListener(
                this.txManager,
                new TestPartitionDataStorage(tableId, PART_ID, mvPartStorage),
                storageUpdateHandler,
                txStateStorage().getOrCreatePartitionStorage(PART_ID),
                safeTime,
                storageIndexTracker,
                catalogService,
                schemaManager,
                mock(IndexMetaStorage.class),
                LOCAL_NODE.id(),
                mock(MinimumRequiredTimeCollectorService.class),
                mock(Executor.class),
                placementDriver,
                clockService,
                new SystemPropertiesNodeProperties(),
                enabledColocation ? zonePartitionId : tablePartitionId
        );

        // Mirror the replica-listener wiring on the raft side.
        if (enabledColocation) {
            ZonePartitionRaftListener zoneRaftListener = new ZonePartitionRaftListener(
                    zonePartitionId,
                    txStateStorage().getOrCreatePartitionStorage(PART_ID),
                    this.txManager,
                    safeTime,
                    storageIndexTracker,
                    new NoOpPartitionsSnapshots(),
                    mock(Executor.class)
            );

            zoneRaftListener.addTableProcessor(tableId, tablePartitionListener);

            partitionListener = zoneRaftListener;
        } else {
            partitionListener = tablePartitionListener;
        }

        // Update(All)Command handling requires both information about raft group topology and the primary replica,
        // thus onConfigurationCommitted and primaryReplicaChangeCommand are called.
        {
            partitionListener.onConfigurationCommitted(
                    new RaftGroupConfiguration(
                            1,
                            1,
                            List.of(LOCAL_NODE.name()),
                            Collections.emptyList(),
                            null,
                            null
                    ),
                    1,
                    1
            );

            CompletableFuture<ReplicaMeta> primaryMetaFuture = placementDriver.getPrimaryReplica(groupId, CLOCK.now());
            assertThat(primaryMetaFuture, willCompleteSuccessfully());

            ReplicaMeta primary = primaryMetaFuture.join();

            PrimaryReplicaChangeCommand primaryReplicaChangeCommand = REPLICA_MESSAGES_FACTORY.primaryReplicaChangeCommand()
                    .leaseStartTime(primary.getStartTime().longValue())
                    .primaryReplicaNodeId(primary.getLeaseholderId())
                    .primaryReplicaNodeName(primary.getLeaseholder())
                    .build();

            assertThat(svc.run(primaryReplicaChangeCommand), willCompleteSuccessfully());
        }
    }
    /**
     * Wraps a replica result into a minimal {@link TimestampAwareReplicaResponse}: only {@code result()} and
     * {@code timestamp()} are meaningful; the network-message methods return stub values.
     *
     * @param r Replica result to wrap.
     * @return Timestamp-aware response carrying the result and the current test-clock time.
     */
    private static TimestampAwareReplicaResponse dummyTimestampAwareResponse(ReplicaResult r) {
        return new TimestampAwareReplicaResponse() {
            @Override
            public @Nullable Object result() {
                return r.result();
            }

            @Override
            public @Nullable HybridTimestamp timestamp() {
                // Current time of the fixed test clock.
                return CLOCK.now();
            }

            // The methods below are never exercised by tests; stub values are sufficient.
            @Override
            public MessageSerializer<NetworkMessage> serializer() {
                return null;
            }

            @Override
            public short messageType() {
                return 0;
            }

            @Override
            public short groupType() {
                return 0;
            }

            @Override
            public NetworkMessage clone() {
                return null;
            }
        };
    }
    /**
     * Replica listener.
     *
     * @return Replica listener.
     */
    public ReplicaListener getReplicaListener() {
        return replicaListener;
    }
    /**
     * Group id of single partition of this table.
     *
     * @return Group id.
     */
    public ReplicationGroupId groupId() {
        return groupId;
    }
    /**
     * Gets the transaction manager that is bound to the table.
     *
     * @return Transaction manager.
     */
    public TxManager txManager() {
        return txManager;
    }
    /**
     * Creates a {@link TxManager} wired to a mocked cluster service (local node only) and starts it.
     *
     * @param replicaSvc Replica service to use.
     * @param placementDriver Placement driver.
     * @param txConfiguration Transaction configuration.
     * @param systemCfg System configuration.
     * @param resourcesRegistry Resources registry.
     * @return Started transaction manager.
     */
    public static TxManagerImpl txManager(
            ReplicaService replicaSvc,
            PlacementDriver placementDriver,
            TransactionConfiguration txConfiguration,
            SystemDistributedConfiguration systemCfg,
            RemotelyTriggeredResourceRegistry resourcesRegistry
    ) {
        // Topology/messaging are mocked so the manager only ever sees the local node.
        TopologyService topologyService = mock(TopologyService.class);
        when(topologyService.localMember()).thenReturn(LOCAL_NODE);

        ClusterService clusterService = mock(ClusterService.class);
        when(clusterService.messagingService()).thenReturn(new DummyMessagingService(LOCAL_NODE));
        when(clusterService.topologyService()).thenReturn(topologyService);

        TransactionInflights transactionInflights = new TransactionInflights(placementDriver, CLOCK_SERVICE);

        var txManager = new TxManagerImpl(
                txConfiguration,
                systemCfg,
                clusterService,
                replicaSvc,
                HeapLockManager.smallInstance(),
                CLOCK_SERVICE,
                new TransactionIdGenerator(0xdeadbeef),
                placementDriver,
                () -> DEFAULT_IDLE_SAFE_TIME_PROPAGATION_PERIOD_MILLISECONDS,
                new TestLocalRwTxCounter(),
                resourcesRegistry,
                transactionInflights,
                new TestLowWatermark(),
                COMMON_SCHEDULER,
                new NoOpMetricManager()
        );

        // The manager must be started before use; fail the test if startup does not complete.
        assertThat(txManager.startAsync(new ComponentContext()), willCompleteSuccessfully());

        return txManager;
    }
    /** {@inheritDoc} */
    @Override
    public CompletableFuture<BinaryRow> get(BinaryRowEx keyRow, InternalTransaction tx) {
        return super.get(keyRow, tx);
    }
    /** {@inheritDoc} Always resolves to the single local test node. */
    @Override
    public CompletableFuture<InternalClusterNode> evaluateReadOnlyRecipientNode(int partId, @Nullable HybridTimestamp readTimestamp) {
        return completedFuture(LOCAL_NODE);
    }
    /**
     * Returns dummy table index storages supplier.
     *
     * @param indexes Index storage by ID.
     * @return Supplier that always returns the given map.
     */
    public static TableIndexStoragesSupplier createTableIndexStoragesSupplier(Map<Integer, TableSchemaAwareIndexStorage> indexes) {
        return () -> indexes;
    }
    /**
     * Dummy messaging service for tests purposes. It does not provide any messaging functionality, but allows to trigger events.
     *
     * <p>Every method throws {@link UnsupportedOperationException} except {@code invoke(String, ...)}, which dispatches
     * the message to the locally registered handlers.
     */
    private static class DummyMessagingService extends AbstractMessagingService {
        /** The local node passed to message handlers as the sender. */
        private final InternalClusterNode localNode;

        /** Generator of correlation ids for dispatched messages. */
        private final AtomicLong correlationIdGenerator = new AtomicLong();

        DummyMessagingService(InternalClusterNode localNode) {
            this.localNode = localNode;
        }

        /** {@inheritDoc} */
        @Override
        public void weakSend(InternalClusterNode recipient, NetworkMessage msg) {
            throw new UnsupportedOperationException("Not implemented yet");
        }

        /** {@inheritDoc} */
        @Override
        public CompletableFuture<Void> send(InternalClusterNode recipient, ChannelType channelType, NetworkMessage msg) {
            throw new UnsupportedOperationException("Not implemented yet");
        }

        /** {@inheritDoc} */
        @Override
        public CompletableFuture<Void> send(String recipientConsistentId, ChannelType channelType, NetworkMessage msg) {
            throw new UnsupportedOperationException("Not implemented yet");
        }

        /** {@inheritDoc} */
        @Override
        public CompletableFuture<Void> send(NetworkAddress recipientNetworkAddress, ChannelType channelType, NetworkMessage msg) {
            throw new UnsupportedOperationException("Not implemented yet");
        }

        /** {@inheritDoc} */
        @Override
        public CompletableFuture<Void> respond(InternalClusterNode recipient, ChannelType type, NetworkMessage msg, long correlationId) {
            throw new UnsupportedOperationException("Not implemented yet");
        }

        /** {@inheritDoc} */
        @Override
        public CompletableFuture<Void> respond(String recipientConsistentId, ChannelType type, NetworkMessage msg, long correlationId) {
            throw new UnsupportedOperationException("Not implemented yet");
        }

        /** {@inheritDoc} */
        @Override
        public CompletableFuture<NetworkMessage> invoke(InternalClusterNode recipient, ChannelType type, NetworkMessage msg, long timeout) {
            throw new UnsupportedOperationException("Not implemented yet");
        }

        /** {@inheritDoc} Delivers the message to all locally registered handlers for its group type. */
        @Override
        public CompletableFuture<NetworkMessage> invoke(String recipientNodeId, ChannelType type, NetworkMessage msg, long timeout) {
            getMessageHandlers(msg.groupType()).forEach(h -> h.onReceived(msg, localNode, correlationIdGenerator.getAndIncrement()));

            return nullCompletedFuture();
        }
    }
    /** No-op {@link PartitionsSnapshots} implementation: snapshots are mocked and cleanup calls do nothing. */
    private static class NoOpPartitionsSnapshots implements PartitionsSnapshots {
        /** {@inheritDoc} */
        @Override
        public PartitionSnapshots partitionSnapshots(PartitionKey partitionKey) {
            return mock(PartitionSnapshots.class);
        }

        /** {@inheritDoc} No-op. */
        @Override
        public void cleanupOutgoingSnapshots(PartitionKey partitionKey) {
        }

        /** {@inheritDoc} No-op. */
        @Override
        public void finishOutgoingSnapshot(UUID snapshotId) {
        }
    }
}
|
apache/maven-plugins | 35,011 | maven-checkstyle-plugin/src/main/java/org/apache/maven/plugins/checkstyle/exec/DefaultCheckstyleExecutor.java | package org.apache.maven.plugins.checkstyle.exec;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.io.ByteArrayInputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Method;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import org.apache.commons.io.IOUtils;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.DependencyResolutionRequiredException;
import org.apache.maven.model.Resource;
import org.apache.maven.project.MavenProject;
import org.codehaus.plexus.component.annotations.Component;
import org.codehaus.plexus.component.annotations.Requirement;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.resource.ResourceManager;
import org.codehaus.plexus.resource.loader.FileResourceCreationException;
import org.codehaus.plexus.resource.loader.FileResourceLoader;
import org.codehaus.plexus.resource.loader.ResourceNotFoundException;
import org.codehaus.plexus.util.FileUtils;
import org.codehaus.plexus.util.StringUtils;
import com.puppycrawl.tools.checkstyle.Checker;
import com.puppycrawl.tools.checkstyle.ConfigurationLoader;
import com.puppycrawl.tools.checkstyle.DefaultConfiguration;
import com.puppycrawl.tools.checkstyle.PackageNamesLoader;
import com.puppycrawl.tools.checkstyle.PropertiesExpander;
import com.puppycrawl.tools.checkstyle.api.AuditListener;
import com.puppycrawl.tools.checkstyle.api.CheckstyleException;
import com.puppycrawl.tools.checkstyle.api.Configuration;
import com.puppycrawl.tools.checkstyle.api.FilterSet;
import com.puppycrawl.tools.checkstyle.filters.SuppressionsLoader;
/**
* @author Olivier Lamy
* @since 2.5
* @version $Id$
*/
@Component( role = CheckstyleExecutor.class, hint = "default", instantiationStrategy = "per-lookup" )
public class DefaultCheckstyleExecutor
extends AbstractLogEnabled
implements CheckstyleExecutor
{
    /** Locator used to resolve the Checkstyle configuration and related resources. */
    @Requirement( hint = "default" )
    private ResourceManager locator;

    /** Locator dedicated to license (header) resources; configured with the license artifacts. */
    @Requirement( hint = "license" )
    private ResourceManager licenseLocator;
    /**
     * Runs Checkstyle for the given request: collects the files to process, builds the project class path,
     * configures a {@link Checker} with suppressions and listeners, processes the files and returns the results.
     *
     * @param request execution parameters (project, locations, listeners, flags).
     * @return collected Checkstyle results.
     * @throws CheckstyleExecutorException on setup errors, or when errors are reported and failsOnError is set.
     * @throws CheckstyleException on Checkstyle-internal failures.
     */
    public CheckstyleResults executeCheckstyle( CheckstyleExecutorRequest request )
        throws CheckstyleExecutorException, CheckstyleException
    {
        if ( getLogger().isDebugEnabled() )
        {
            getLogger().debug( "executeCheckstyle start headerLocation : " + request.getHeaderLocation() );
        }

        MavenProject project = request.getProject();

        configureResourceLocator( locator, request, null );

        configureResourceLocator( licenseLocator, request, request.getLicenseArtifacts() );

        // Config is less critical than License, locator can still be used.
        // configureResourceLocator( configurationLocator, request, request.getConfigurationArtifacts() );

        List<File> files;
        try
        {
            files = getFilesToProcess( request );
        }
        catch ( IOException e )
        {
            throw new CheckstyleExecutorException( "Error getting files to process", e );
        }

        final String suppressionsFilePath = getSuppressionsFilePath( request );
        FilterSet filterSet = getSuppressionsFilterSet( suppressionsFilePath );

        Checker checker = new Checker();

        // setup classloader, needed to avoid "Unable to get class information for ..." errors
        List<String> classPathStrings = new ArrayList<>();
        List<String> outputDirectories = new ArrayList<>();

        // stand-alone
        Collection<File> sourceDirectories = null;
        Collection<File> testSourceDirectories = request.getTestSourceDirectories();

        // aggregator
        Map<MavenProject, Collection<File>> sourceDirectoriesByProject = new HashMap<>();
        Map<MavenProject, Collection<File>> testSourceDirectoriesByProject = new HashMap<>();

        // Collect class path and source directories, either per reactor project (aggregate) or
        // for the single current project.
        if ( request.isAggregate() )
        {
            for ( MavenProject childProject : request.getReactorProjects() )
            {
                sourceDirectories = new ArrayList<>( childProject.getCompileSourceRoots().size() );
                List<String> compileSourceRoots = childProject.getCompileSourceRoots();
                for ( String compileSourceRoot : compileSourceRoots )
                {
                    sourceDirectories.add( new File( compileSourceRoot ) );
                }
                sourceDirectoriesByProject.put( childProject, sourceDirectories );

                testSourceDirectories = new ArrayList<>( childProject.getTestCompileSourceRoots().size() );
                List<String> testCompileSourceRoots = childProject.getTestCompileSourceRoots();
                for ( String testCompileSourceRoot : testCompileSourceRoots )
                {
                    testSourceDirectories.add( new File( testCompileSourceRoot ) );
                }
                testSourceDirectoriesByProject.put( childProject, testSourceDirectories );

                prepareCheckstylePaths( request, childProject, classPathStrings, outputDirectories,
                                        sourceDirectories, testSourceDirectories );
            }
        }
        else
        {
            sourceDirectories = request.getSourceDirectories();
            prepareCheckstylePaths( request, project, classPathStrings, outputDirectories, sourceDirectories,
                                    testSourceDirectories );
        }

        // Convert class path entries and existing output directories to URLs for the class loader.
        final List<URL> urls = new ArrayList<>( classPathStrings.size() );

        for ( String path : classPathStrings )
        {
            try
            {
                urls.add( new File( path ).toURI().toURL() );
            }
            catch ( MalformedURLException e )
            {
                throw new CheckstyleExecutorException( e.getMessage(), e );
            }
        }

        for ( String outputDirectoryString : outputDirectories )
        {
            try
            {
                if ( outputDirectoryString != null )
                {
                    File outputDirectoryFile = new File( outputDirectoryString );
                    if ( outputDirectoryFile.exists() )
                    {
                        URL outputDirectoryUrl = outputDirectoryFile.toURI().toURL();
                        getLogger().debug( "Adding the outputDirectory " + outputDirectoryUrl.toString()
                                               + " to the Checkstyle class path" );
                        urls.add( outputDirectoryUrl );
                    }
                }
            }
            catch ( MalformedURLException e )
            {
                throw new CheckstyleExecutorException( e.getMessage(), e );
            }
        }

        // doPrivileged so class loader creation works under a security manager.
        URLClassLoader projectClassLoader = AccessController.doPrivileged( new PrivilegedAction<URLClassLoader>()
        {
            public URLClassLoader run()
            {
                return new URLClassLoader( urls.toArray( new URL[urls.size()] ), null );
            }
        } );

        checker.setClassLoader( projectClassLoader );

        checker.setModuleClassLoader( Thread.currentThread().getContextClassLoader() );

        if ( filterSet != null )
        {
            checker.addFilter( filterSet );
        }
        Configuration configuration = getConfiguration( request );
        checker.configure( configuration );

        AuditListener listener = request.getListener();

        if ( listener != null )
        {
            checker.addListener( listener );
        }

        if ( request.isConsoleOutput() )
        {
            checker.addListener( request.getConsoleListener() );
        }

        CheckstyleCheckerListener checkerListener = new CheckstyleCheckerListener( configuration );
        // Register source directories on the result-collecting listener (mirrors the loop above).
        if ( request.isAggregate() )
        {
            for ( MavenProject childProject : request.getReactorProjects() )
            {
                sourceDirectories = sourceDirectoriesByProject.get( childProject );
                testSourceDirectories = testSourceDirectoriesByProject.get( childProject );
                addSourceDirectory( checkerListener, sourceDirectories,
                                    testSourceDirectories,
                                    childProject.getResources(), request );
            }
        }
        else
        {
            addSourceDirectory( checkerListener, sourceDirectories, testSourceDirectories, request.getResources(),
                                request );
        }

        checker.addListener( checkerListener );

        int nbErrors = checker.process( files );

        checker.destroy();

        // Close the custom class loader to release file handles (URLClassLoader is Closeable since Java 7).
        if ( projectClassLoader instanceof Closeable )
        {
            try
            {
                ( ( Closeable ) projectClassLoader ).close();
            }
            catch ( IOException ex )
            {
                // Nothing we can do - and not detrimental to the build (save running out of file handles).
                getLogger().info( "Failed to close custom Classloader - this indicated a bug in the code.", ex );
            }
        }

        if ( request.getStringOutputStream() != null )
        {
            String message = request.getStringOutputStream().toString().trim();

            if ( message.length() > 0 )
            {
                getLogger().info( message );
            }
        }

        // Build a human-readable summary ("There is/are N error(s) ...") and either fail or log it.
        if ( nbErrors > 0 )
        {
            StringBuilder message = new StringBuilder( "There " );
            if ( nbErrors == 1 )
            {
                message.append( "is" );
            }
            else
            {
                message.append( "are" );
            }
            message.append( " " );
            message.append( nbErrors );
            message.append( " error" );
            if ( nbErrors != 1 )
            {
                message.append( "s" );
            }
            message.append( " reported by Checkstyle" );
            String version = getCheckstyleVersion();
            if ( version != null )
            {
                message.append( " " );
                message.append( version );
            }
            message.append( " with " );
            message.append( request.getConfigLocation() );
            message.append( " ruleset." );

            if ( request.isFailsOnError() )
            {
                // TODO: should be a failure, not an error. Report is not meant to
                // throw an exception here (so site would
                // work regardless of config), but should record this information
                throw new CheckstyleExecutorException( message.toString() );
            }
            else
            {
                getLogger().info( message.toString() );
            }
        }

        return checkerListener.getResults();
    }
protected void addSourceDirectory( CheckstyleCheckerListener sinkListener, Collection<File> sourceDirectories,
Collection<File> testSourceDirectories, List<Resource> resources,
CheckstyleExecutorRequest request )
{
if ( sourceDirectories != null )
{
for ( File sourceDirectory : sourceDirectories )
{
if ( sourceDirectory.exists() )
{
sinkListener.addSourceDirectory( sourceDirectory );
}
}
}
if ( request.isIncludeTestSourceDirectory() && ( testSourceDirectories != null ) )
{
for ( File testSourceDirectory : testSourceDirectories )
{
if ( testSourceDirectory.isDirectory() )
{
sinkListener.addSourceDirectory( testSourceDirectory );
}
}
}
if ( resources != null )
{
for ( Resource resource : resources )
{
if ( resource.getDirectory() != null )
{
File resourcesDirectory = new File( resource.getDirectory() );
if ( resourcesDirectory.exists() && resourcesDirectory.isDirectory() )
{
sinkListener.addSourceDirectory( resourcesDirectory );
getLogger().debug( "Added '" + resourcesDirectory.getAbsolutePath()
+ "' as a source directory." );
}
}
}
}
}
    /**
     * Builds the Checkstyle {@link Configuration} for the given request: resolves the
     * configuration file, applies overriding properties, and injects the "charset"
     * and "cacheFile" attributes on the appropriate module depending on the
     * Checkstyle version present at runtime.
     *
     * @param request executor request data.
     * @return the loaded configuration, never <code>null</code>.
     * @throws CheckstyleExecutorException when the configuration cannot be loaded.
     */
    public Configuration getConfiguration( CheckstyleExecutorRequest request )
        throws CheckstyleExecutorException
    {
        try
        {
            // Checkstyle will always use the context classloader in order
            // to load resources (dtds),
            // so we have to fix it
            // NOTE(review): the previous context classloader is never restored here —
            // confirm callers do not depend on the original loader afterwards.
            ClassLoader checkstyleClassLoader = PackageNamesLoader.class.getClassLoader();
            Thread.currentThread().setContextClassLoader( checkstyleClassLoader );
            String configFile = getConfigFile( request );
            Properties overridingProperties = getOverridingProperties( request );
            Configuration config =
                ConfigurationLoader.loadConfiguration( configFile, new PropertiesExpander( overridingProperties ),
                                                       request.isOmitIgnoredModules() );
            // Fall back to the platform charset only when the request does not set one
            String effectiveEncoding = StringUtils.isNotEmpty( request.getEncoding() ) ? request.getEncoding() : System
                .getProperty( "file.encoding", "UTF-8" );
            if ( StringUtils.isEmpty( request.getEncoding() ) )
            {
                getLogger().warn( "File encoding has not been set, using platform encoding " + effectiveEncoding
                                      + ", i.e. build is platform dependent!" );
            }
            // MCHECKSTYLE-332 Checkstyle 6.16+ (#569): the cache is moved to the Checker module instead of TreeWalker
            // Detect via reflection whether this Checkstyle version has Checker.setCacheFile(String)
            boolean cacheInChecker = false;
            for ( Method method : Checker.class.getMethods() )
            {
                if ( "setCacheFile".equals( method.getName() )
                    && Arrays.equals( method.getParameterTypes(), new Class<?>[] { String.class } ) )
                {
                    cacheInChecker = true;
                    break;
                }
            }
            // Root module: set charset (and cacheFile on new Checkstyle versions)
            if ( "Checker".equals( config.getName() )
                || "com.puppycrawl.tools.checkstyle.Checker".equals( config.getName() ) )
            {
                if ( config instanceof DefaultConfiguration )
                {
                    // MCHECKSTYLE-173 Only add the "charset" attribute if it has not been set
                    addAttributeIfNotExists( (DefaultConfiguration) config, "charset", effectiveEncoding );
                    if ( cacheInChecker )
                    {
                        addAttributeIfNotExists( (DefaultConfiguration) config, "cacheFile", request.getCacheFile() );
                    }
                }
                else
                {
                    getLogger().warn( "Failed to configure file encoding on module " + config );
                }
            }
            // Older Checkstyle versions: cacheFile belongs on the TreeWalker child module
            Configuration[] modules = config.getChildren();
            for ( Configuration module : modules )
            {
                if ( "TreeWalker".equals( module.getName() )
                    || "com.puppycrawl.tools.checkstyle.TreeWalker".equals( module.getName() ) )
                {
                    if ( module instanceof DefaultConfiguration )
                    {
                        if ( !cacheInChecker )
                        {
                            addAttributeIfNotExists( (DefaultConfiguration) module, "cacheFile",
                                                     request.getCacheFile() );
                        }
                    }
                    else
                    {
                        getLogger().warn( "Failed to configure cache file on module " + module );
                    }
                }
            }
            return config;
        }
        catch ( CheckstyleException e )
        {
            throw new CheckstyleExecutorException( "Failed during checkstyle configuration", e );
        }
    }
private void addAttributeIfNotExists( DefaultConfiguration config, String name, String value )
{
try
{
// MCHECKSTYLE-132 DefaultConfiguration addAttribute has changed in checkstyle 5.3
if ( config.getAttribute( name ) == null )
{
config.addAttribute( name, value );
}
}
catch ( CheckstyleException ex )
{
// MCHECKSTYLE-159 Checkstyle 5.4+ throws an exception when trying to access an attribute that doesn't exist
config.addAttribute( name, value );
}
}
private void prepareCheckstylePaths( CheckstyleExecutorRequest request, MavenProject project,
List<String> classPathStrings, List<String> outputDirectories,
Collection<File> sourceDirectories, Collection<File> testSourceDirectories )
throws CheckstyleExecutorException
{
try
{
outputDirectories.add( project.getBuild().getOutputDirectory() );
if ( request.isIncludeTestSourceDirectory() && ( testSourceDirectories != null )
&& anyDirectoryExists( testSourceDirectories ) )
{
classPathStrings.addAll( project.getTestClasspathElements() );
outputDirectories.add( project.getBuild().getTestOutputDirectory() );
}
else
{
classPathStrings.addAll( project.getCompileClasspathElements() );
}
}
catch ( DependencyResolutionRequiredException e )
{
throw new CheckstyleExecutorException( e.getMessage(), e );
}
}
private boolean anyDirectoryExists( Collection<File> files )
{
for ( File file : files )
{
if ( file.isDirectory() )
{
return true;
}
}
return false;
}
/**
* Get the effective Checkstyle version at runtime.
* @return the MANIFEST implementation version of Checkstyle API package (can be <code>null</code>)
*
*@todo Copied from CheckstyleReportGenerator - move to a utility class
*/
private String getCheckstyleVersion()
{
Package checkstyleApiPackage = Configuration.class.getPackage();
return ( checkstyleApiPackage == null ) ? null : checkstyleApiPackage.getImplementationVersion();
}
private Properties getOverridingProperties( CheckstyleExecutorRequest request )
throws CheckstyleExecutorException
{
Properties p = new Properties();
InputStream in = null;
try
{
if ( request.getPropertiesLocation() != null )
{
if ( getLogger().isDebugEnabled() )
{
getLogger().debug( "request.getPropertiesLocation() " + request.getPropertiesLocation() );
}
File propertiesFile = locator.getResourceAsFile( request.getPropertiesLocation(),
"checkstyle-checker.properties" );
if ( propertiesFile != null )
{
in = new FileInputStream( propertiesFile );
p.load( in );
in.close();
in = null;
}
}
if ( StringUtils.isNotEmpty( request.getPropertyExpansion() ) )
{
String propertyExpansion = request.getPropertyExpansion();
// Convert \ to \\, so that p.load will convert it back properly
propertyExpansion = StringUtils.replace( propertyExpansion, "\\", "\\\\" );
p.load( new ByteArrayInputStream( propertyExpansion.getBytes() ) );
}
// Workaround for MCHECKSTYLE-48
// Make sure that "config/maven-header.txt" is the default value
// for headerLocation, if configLocation="config/maven_checks.xml"
String headerLocation = request.getHeaderLocation();
if ( "config/maven_checks.xml".equals( request.getConfigLocation() ) )
{
if ( "LICENSE.txt".equals( request.getHeaderLocation() ) )
{
headerLocation = "config/maven-header.txt";
}
}
if ( getLogger().isDebugEnabled() )
{
getLogger().debug( "headerLocation " + headerLocation );
}
if ( StringUtils.isNotEmpty( headerLocation ) )
{
try
{
File headerFile = licenseLocator.getResourceAsFile( headerLocation, "checkstyle-header.txt" );
if ( headerFile != null )
{
p.setProperty( "checkstyle.header.file", headerFile.getAbsolutePath() );
}
}
catch ( FileResourceCreationException | ResourceNotFoundException e )
{
getLogger().debug( "Unable to process header location: " + headerLocation );
getLogger().debug( "Checkstyle will throw exception if ${checkstyle.header.file} is used" );
}
}
if ( request.getCacheFile() != null )
{
p.setProperty( "checkstyle.cache.file", request.getCacheFile() );
}
}
catch ( IOException | ResourceNotFoundException | FileResourceCreationException e )
{
throw new CheckstyleExecutorException( "Failed to get overriding properties", e );
}
finally
{
IOUtils.closeQuietly( in );
}
if ( request.getSuppressionsFileExpression() != null )
{
String suppressionsFilePath = getSuppressionsFilePath( request );
if ( suppressionsFilePath != null )
{
p.setProperty( request.getSuppressionsFileExpression(), suppressionsFilePath );
}
}
return p;
}
private List<File> getFilesToProcess( CheckstyleExecutorRequest request )
throws IOException
{
StringBuilder excludesStr = new StringBuilder();
if ( StringUtils.isNotEmpty( request.getExcludes() ) )
{
excludesStr.append( request.getExcludes() );
}
String[] defaultExcludes = FileUtils.getDefaultExcludes();
for ( String defaultExclude : defaultExcludes )
{
if ( excludesStr.length() > 0 )
{
excludesStr.append( "," );
}
excludesStr.append( defaultExclude );
}
Set<File> files = new LinkedHashSet<>();
if ( request.isAggregate() )
{
for ( MavenProject project : request.getReactorProjects() )
{
Set<File> sourceDirectories = new LinkedHashSet<>();
// CompileSourceRoots are absolute paths
List<String> compileSourceRoots = project.getCompileSourceRoots();
for ( String compileSourceRoot : compileSourceRoots )
{
sourceDirectories.add( new File( compileSourceRoot ) );
}
Set<File> testSourceDirectories = new LinkedHashSet<>();
// CompileSourceRoots are absolute paths
List<String> testCompileSourceRoots = project.getTestCompileSourceRoots();
for ( String testCompileSourceRoot : testCompileSourceRoots )
{
testSourceDirectories.add( new File( testCompileSourceRoot ) );
}
addFilesToProcess( request, sourceDirectories, project.getResources(), project.getTestResources(),
files, testSourceDirectories );
}
}
else
{
Collection<File> sourceDirectories = request.getSourceDirectories();
addFilesToProcess( request, sourceDirectories, request.getResources(),
request.getTestResources(), files, request.getTestSourceDirectories() );
}
getLogger().debug( "Added " + files.size() + " files to process." );
return new ArrayList<>( files );
}
    /**
     * Scans the given source, test-source and resource directories and adds every
     * matching file to <code>files</code>.
     *
     * @param request executor request data (provides include/exclude patterns).
     * @param sourceDirectories main source roots, may be <code>null</code>.
     * @param resources resource definitions, may be <code>null</code>.
     * @param testResources test resource definitions, may be <code>null</code>.
     * @param files collector the discovered files are added to.
     * @param testSourceDirectories test source roots, may be <code>null</code>; only
     *            scanned when the request asks to include test sources.
     * @throws IOException when a directory scan fails.
     */
    private void addFilesToProcess( CheckstyleExecutorRequest request, Collection<File> sourceDirectories,
                                    List<Resource> resources, List<Resource> testResources, Collection<File> files,
                                    Collection<File> testSourceDirectories )
        throws IOException
    {
        if ( sourceDirectories != null )
        {
            for ( File sourceDirectory : sourceDirectories )
            {
                if ( sourceDirectory.isDirectory() )
                {
                    final List<File> sourceFiles =
                        FileUtils.getFiles( sourceDirectory, request.getIncludes(), request.getExcludes() );
                    files.addAll( sourceFiles );
                    getLogger().debug( "Added " + sourceFiles.size() + " source files found in '"
                        + sourceDirectory.getAbsolutePath() + "'." );
                }
            }
        }
        if ( request.isIncludeTestSourceDirectory() && testSourceDirectories != null )
        {
            for ( File testSourceDirectory : testSourceDirectories )
            {
                if ( testSourceDirectory.isDirectory() )
                {
                    final List<File> testSourceFiles =
                        FileUtils.getFiles( testSourceDirectory, request.getIncludes(), request.getExcludes() );
                    files.addAll( testSourceFiles );
                    getLogger().debug( "Added " + testSourceFiles.size() + " test source files found in '"
                        + testSourceDirectory.getAbsolutePath() + "'." );
                }
            }
        }
        if ( resources != null && request.isIncludeResources() )
        {
            addResourceFilesToProcess( request, resources, files );
        }
        else
        {
            // NOTE(review): this message also appears when resources exist but
            // isIncludeResources() is false — potentially misleading in that case.
            getLogger().debug( "No resources found in this project." );
        }
        if ( testResources != null && request.isIncludeTestResources() )
        {
            addResourceFilesToProcess( request, testResources, files );
        }
        else
        {
            getLogger().debug( "No test resources found in this project." );
        }
    }
    /**
     * Scans each resource directory and adds the matching files to <code>files</code>.
     * When a resource directory equals the project base directory, the resource's own
     * include/exclude patterns are merged into the request-level patterns so the scan
     * does not pick up every file under the project root (MCHECKSTYLE-214).
     *
     * @param request executor request data.
     * @param resources resource definitions to scan, never <code>null</code> here.
     * @param files collector the discovered files are added to.
     * @throws IOException when a directory scan fails.
     */
    private void addResourceFilesToProcess( CheckstyleExecutorRequest request, List<Resource> resources,
                                            Collection<File> files )
        throws IOException
    {
        for ( Resource resource : resources )
        {
            if ( resource.getDirectory() != null )
            {
                File resourcesDirectory = new File( resource.getDirectory() );
                if ( resourcesDirectory.isDirectory() )
                {
                    String includes = request.getResourceIncludes();
                    String excludes = request.getResourceExcludes();
                    // MCHECKSTYLE-214: Only with project-root respect in/excludes, otherwise you'll get every file
                    if ( resourcesDirectory.equals( request.getProject().getBasedir() ) )
                    {
                        String resourceIncludes = StringUtils.join( resource.getIncludes().iterator(), "," );
                        if ( StringUtils.isEmpty( includes ) )
                        {
                            includes = resourceIncludes;
                        }
                        else
                        {
                            includes += "," + resourceIncludes;
                        }
                        String resourceExcludes = StringUtils.join( resource.getExcludes().iterator(), "," );
                        if ( StringUtils.isEmpty( excludes ) )
                        {
                            excludes = resourceExcludes;
                        }
                        else
                        {
                            excludes += "," + resourceExcludes;
                        }
                    }
                    List<File> resourceFiles =
                        FileUtils.getFiles( resourcesDirectory, includes, excludes );
                    files.addAll( resourceFiles );
                    getLogger().debug( "Added " + resourceFiles.size() + " resource files found in '"
                        + resourcesDirectory.getAbsolutePath() + "'." );
                }
                else
                {
                    getLogger().debug( "The resources directory '" + resourcesDirectory.getAbsolutePath()
                        + "' does not exist or is not a directory." );
                }
            }
        }
    }
private FilterSet getSuppressionsFilterSet( final String suppressionsFilePath )
throws CheckstyleExecutorException
{
if ( suppressionsFilePath == null )
{
return null;
}
try
{
return SuppressionsLoader.loadSuppressions( suppressionsFilePath );
}
catch ( CheckstyleException ce )
{
throw new CheckstyleExecutorException( "Failed to load suppressions file from: "
+ suppressionsFilePath, ce );
}
}
private String getSuppressionsFilePath( final CheckstyleExecutorRequest request )
throws CheckstyleExecutorException
{
final String suppressionsLocation = request.getSuppressionsLocation();
if ( StringUtils.isEmpty( suppressionsLocation ) )
{
return null;
}
try
{
File suppressionsFile = locator.getResourceAsFile( suppressionsLocation, "checkstyle-suppressions.xml" );
return suppressionsFile == null ? null : suppressionsFile.getAbsolutePath();
}
catch ( ResourceNotFoundException e )
{
throw new CheckstyleExecutorException( "Unable to find suppressions file at location: "
+ suppressionsLocation, e );
}
catch ( FileResourceCreationException e )
{
throw new CheckstyleExecutorException( "Unable to process suppressions file location: "
+ suppressionsLocation, e );
}
}
private String getConfigFile( CheckstyleExecutorRequest request )
throws CheckstyleExecutorException
{
try
{
if ( getLogger().isDebugEnabled() )
{
getLogger().debug( "request.getConfigLocation() " + request.getConfigLocation() );
}
File configFile = locator.getResourceAsFile( request.getConfigLocation(), "checkstyle-checker.xml" );
if ( configFile == null )
{
throw new CheckstyleExecutorException( "Unable to process config location: "
+ request.getConfigLocation() );
}
return configFile.getAbsolutePath();
}
catch ( ResourceNotFoundException e )
{
throw new CheckstyleExecutorException( "Unable to find configuration file at location: "
+ request.getConfigLocation(), e );
}
catch ( FileResourceCreationException e )
{
throw new CheckstyleExecutorException( "Unable to process configuration file at location: "
+ request.getConfigLocation(), e );
}
}
/**
* Configures search paths in the resource locator.
* This method should only be called once per execution.
*
* @param request executor request data.
*/
private void configureResourceLocator( final ResourceManager resourceManager,
final CheckstyleExecutorRequest request,
final List<Artifact> additionalArtifacts )
{
final MavenProject project = request.getProject();
resourceManager.setOutputDirectory( new File( project.getBuild().getDirectory() ) );
// Recurse up the parent hierarchy and add project directories to the search roots
MavenProject parent = project;
while ( parent != null && parent.getFile() != null )
{
// MCHECKSTYLE-131 ( olamy ) I don't like this hack.
// (dkulp) Me either. It really pollutes the location stuff
// by allowing searches of stuff outside the current module.
File dir = parent.getFile().getParentFile();
resourceManager.addSearchPath( FileResourceLoader.ID, dir.getAbsolutePath() );
parent = parent.getParent();
}
resourceManager.addSearchPath( "url", "" );
// MCHECKSTYLE-225: load licenses from additional artifacts, not from classpath
if ( additionalArtifacts != null )
{
for ( Artifact licenseArtifact : additionalArtifacts )
{
try
{
resourceManager.addSearchPath( "jar", "jar:" + licenseArtifact.getFile().toURI().toURL() );
}
catch ( MalformedURLException e )
{
// noop
}
}
}
}
}
|
apache/ozone | 35,056 | hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/service/TestSnapshotDeletingServiceIntegrationTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.ozone.om.service;
import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_CHUNK_SIZE_KEY;
import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_ACL_ENABLED;
import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_BLOCK_DELETING_SERVICE_INTERVAL;
import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_SNAPSHOT_DELETING_SERVICE_INTERVAL;
import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_SNAPSHOT_DELETING_SERVICE_TIMEOUT;
import static org.apache.hadoop.ozone.om.OMConfigKeys.OZONE_SNAPSHOT_DEEP_CLEANING_ENABLED;
import static org.apache.hadoop.ozone.om.lock.OzoneManagerLock.FlatResource.SNAPSHOT_GC_LOCK;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyList;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mockConstruction;
import static org.mockito.Mockito.when;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayDeque;
import java.util.Arrays;
import java.util.Deque;
import java.util.Iterator;
import java.util.List;
import java.util.Objects;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Collectors;
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.conf.StorageUnit;
import org.apache.hadoop.hdds.utils.IOUtils;
import org.apache.hadoop.hdds.utils.db.Table;
import org.apache.hadoop.ozone.MiniOzoneCluster;
import org.apache.hadoop.ozone.OzoneConfigKeys;
import org.apache.hadoop.ozone.TestDataUtil;
import org.apache.hadoop.ozone.client.BucketArgs;
import org.apache.hadoop.ozone.client.OzoneBucket;
import org.apache.hadoop.ozone.client.OzoneClient;
import org.apache.hadoop.ozone.client.OzoneVolume;
import org.apache.hadoop.ozone.om.OMConfigKeys;
import org.apache.hadoop.ozone.om.OMMetadataManager;
import org.apache.hadoop.ozone.om.OmMetadataManagerImpl;
import org.apache.hadoop.ozone.om.OmSnapshot;
import org.apache.hadoop.ozone.om.OzoneManager;
import org.apache.hadoop.ozone.om.SnapshotChainManager;
import org.apache.hadoop.ozone.om.helpers.BucketLayout;
import org.apache.hadoop.ozone.om.helpers.OmDirectoryInfo;
import org.apache.hadoop.ozone.om.helpers.OmKeyInfo;
import org.apache.hadoop.ozone.om.helpers.RepeatedOmKeyInfo;
import org.apache.hadoop.ozone.om.helpers.SnapshotInfo;
import org.apache.hadoop.ozone.om.lock.OMLockDetails;
import org.apache.hadoop.ozone.om.snapshot.MultiSnapshotLocks;
import org.apache.hadoop.ozone.om.snapshot.SnapshotUtils;
import org.apache.hadoop.ozone.om.snapshot.filter.ReclaimableKeyFilter;
import org.apache.ozone.test.GenericTestUtils;
import org.apache.ozone.test.tag.Flaky;
import org.apache.ozone.test.tag.Unhealthy;
import org.apache.ratis.util.function.UncheckedAutoCloseableSupplier;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.MethodOrderer.OrderAnnotation;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;
import org.junit.jupiter.api.TestMethodOrder;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
import org.mockito.MockedConstruction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Test Snapshot Deleting Service.
*/
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
@TestMethodOrder(OrderAnnotation.class)
@Unhealthy("HDDS-13303")
public class TestSnapshotDeletingServiceIntegrationTest {
private static final Logger LOG =
LoggerFactory.getLogger(TestSnapshotDeletingServiceIntegrationTest.class);
private static final ByteBuffer CONTENT =
ByteBuffer.allocate(1024 * 1024 * 16);
private MiniOzoneCluster cluster;
private OzoneManager om;
private OzoneBucket bucket1;
private OzoneClient client;
private final Deque<UncheckedAutoCloseableSupplier<OmSnapshot>> rcSnaps = new ArrayDeque<>();
private static final String VOLUME_NAME = "vol1";
private static final String BUCKET_NAME_ONE = "bucket1";
private static final String BUCKET_NAME_TWO = "bucket2";
private static final String BUCKET_NAME_FSO = "bucketfso";
private boolean runIndividualTest = true;
  /**
   * Spins up a 3-datanode mini cluster with short block/dir/snapshot deletion
   * service intervals and snapshot deep-cleaning enabled, then creates the shared
   * volume and bucket used by the ordered tests in this class.
   */
  @BeforeAll
  public void setup() throws Exception {
    OzoneConfiguration conf = new OzoneConfiguration();
    conf.setStorageSize(OzoneConfigKeys.OZONE_SCM_BLOCK_SIZE,
        4, StorageUnit.MB);
    conf.setStorageSize(OZONE_SCM_CHUNK_SIZE_KEY,
        1, StorageUnit.MB);
    // Short intervals so the background services run frequently during the test
    conf.setTimeDuration(OZONE_SNAPSHOT_DELETING_SERVICE_INTERVAL,
        500, TimeUnit.MILLISECONDS);
    conf.setBoolean(OZONE_SNAPSHOT_DEEP_CLEANING_ENABLED, true);
    conf.setTimeDuration(OZONE_SNAPSHOT_DELETING_SERVICE_TIMEOUT,
        500, TimeUnit.MILLISECONDS);
    conf.setInt(OMConfigKeys.OZONE_DIR_DELETING_SERVICE_INTERVAL, 500);
    conf.setTimeDuration(OZONE_BLOCK_DELETING_SERVICE_INTERVAL, 500,
        TimeUnit.MILLISECONDS);
    conf.setBoolean(OZONE_ACL_ENABLED, true);
    // Enable filesystem snapshot feature for the test regardless of the default
    conf.setBoolean(OMConfigKeys.OZONE_FILESYSTEM_SNAPSHOT_ENABLED_KEY, true);
    cluster = MiniOzoneCluster.newBuilder(conf)
        .setNumDatanodes(3)
        .build();
    cluster.waitForClusterToBeReady();
    client = cluster.newClient();
    om = cluster.getOzoneManager();
    bucket1 = TestDataUtil.createVolumeAndBucket(
        client, VOLUME_NAME, BUCKET_NAME_ONE, BucketLayout.DEFAULT);
  }
  /** Releases the client first, then shuts the mini cluster down after all tests. */
  @AfterAll
  public void teardown() {
    IOUtils.closeQuietly(client);
    if (cluster != null) {
      cluster.shutdown();
    }
  }
@AfterEach
public void closeAllSnapshots() {
while (!rcSnaps.isEmpty()) {
rcSnaps.pop().close();
}
// Resume services
om.getKeyManager().getDirDeletingService().resume();
om.getKeyManager().getDeletingService().resume();
om.getKeyManager().getSnapshotDeletingService().resume();
}
private UncheckedAutoCloseableSupplier<OmSnapshot> getOmSnapshot(String volume, String bucket, String snapshotName)
throws IOException {
rcSnaps.push(om.getOmSnapshotManager().getSnapshot(volume, bucket, snapshotName));
return rcSnaps.peek();
}
  /**
   * Verifies that when a snapshot in the middle of the chain is deleted, its
   * deleted-key entries are moved into the next non-deleted snapshot's deleted
   * table (here: bucket1key1 ends up in bucket1snap3's deleted table).
   */
  @Test
  @Order(2)
  @Flaky("HDDS-11130")
  public void testSnapshotSplitAndMove() throws Exception {
    if (runIndividualTest) {
      // Stand-alone run: build the bucket1 snapshot fixture ourselves and wait
      // for at least one successful pass of the snapshot deleting service.
      // (When run after testMultipleSnapshotKeyReclaim, that test already did this.)
      SnapshotDeletingService snapshotDeletingService =
          om.getKeyManager().getSnapshotDeletingService();
      Table<String, SnapshotInfo> snapshotInfoTable =
          om.getMetadataManager().getSnapshotInfoTable();
      createSnapshotDataForBucket(bucket1);
      assertTableRowCount(snapshotInfoTable, 2);
      GenericTestUtils.waitFor(() -> snapshotDeletingService
          .getSuccessfulRunCount() >= 1, 1000, 10000);
    }
    OmSnapshot bucket1snap3 = getOmSnapshot(VOLUME_NAME, BUCKET_NAME_ONE, "bucket1snap3").get();
    // Check bucket1key1 added to next non deleted snapshot db.
    List<? extends Table.KeyValue<String, RepeatedOmKeyInfo>> omKeyInfos =
        bucket1snap3.getMetadataManager()
            .getDeletedTable().getRangeKVs(null, 100,
                "/vol1/bucket1/bucket1key1");
    assertEquals(1, omKeyInfos.size());
  }
  /**
   * Creates bucket2 with two keys, snapshots it, deletes both keys and then the
   * snapshot; verifies the keys become reclaimable (deletedTable drains to 0),
   * the snapshot chain is repaired, and the purged snapshot leaves the cache.
   * Also prepares the bucket1 fixture shared with testSnapshotSplitAndMove.
   */
  @Test
  @Order(1)
  public void testMultipleSnapshotKeyReclaim() throws Exception {
    Table<String, RepeatedOmKeyInfo> deletedTable =
        om.getMetadataManager().getDeletedTable();
    Table<String, SnapshotInfo> snapshotInfoTable =
        om.getMetadataManager().getSnapshotInfoTable();
    // Signal testSnapshotSplitAndMove (order 2) that the fixture is already built.
    runIndividualTest = false;
    createSnapshotDataForBucket(bucket1);
    BucketArgs bucketArgs = new BucketArgs.Builder()
        .setBucketLayout(BucketLayout.LEGACY)
        .build();
    OzoneBucket bucket2 = TestDataUtil.createBucket(
        client, VOLUME_NAME, bucketArgs, BUCKET_NAME_TWO);
    // Create key1 and key2
    TestDataUtil.createKey(bucket2, "bucket2key1", CONTENT.array());
    TestDataUtil.createKey(bucket2, "bucket2key2", CONTENT.array());
    // Create Snapshot
    client.getObjectStore().createSnapshot(VOLUME_NAME, BUCKET_NAME_TWO,
        "bucket2snap1");
    assertTableRowCount(snapshotInfoTable, 3);
    // Both key 1 and key 2 can be reclaimed when Snapshot 1 is deleted.
    client.getProxy().deleteKey(VOLUME_NAME, BUCKET_NAME_TWO,
        "bucket2key1", false);
    client.getProxy().deleteKey(VOLUME_NAME, BUCKET_NAME_TWO,
        "bucket2key2", false);
    assertTableRowCount(deletedTable, 2);
    SnapshotInfo delSnapInfo = snapshotInfoTable
        .get("/vol1/bucket2/bucket2snap1");
    client.getObjectStore().deleteSnapshot(VOLUME_NAME, BUCKET_NAME_TWO,
        "bucket2snap1");
    assertTableRowCount(snapshotInfoTable, 2);
    // KeyDeletingService will clean up.
    assertTableRowCount(deletedTable, 0);
    verifySnapshotChain(delSnapInfo, null);
    // verify the cache of purged snapshot
    // /vol1/bucket2/bucket2snap1 has been cleaned up from cache map
    assertEquals(2, om.getOmSnapshotManager().getSnapshotCacheSize());
    // cleaning up the data
    client.getProxy().deleteSnapshot(VOLUME_NAME, BUCKET_NAME_ONE, "bucket1snap1");
    client.getProxy().deleteSnapshot(VOLUME_NAME, BUCKET_NAME_ONE, "bucket1snap3");
    client.getProxy().deleteBucket(VOLUME_NAME, BUCKET_NAME_TWO);
  }
@SuppressWarnings("checkstyle:MethodLength")
@Test
@Order(3)
@Flaky("HDDS-11131")
public void testSnapshotWithFSO() throws Exception {
Table<String, OmDirectoryInfo> dirTable =
om.getMetadataManager().getDirectoryTable();
Table<String, SnapshotInfo> snapshotInfoTable =
om.getMetadataManager().getSnapshotInfoTable();
Table<String, OmKeyInfo> keyTable =
om.getMetadataManager().getFileTable();
Table<String, RepeatedOmKeyInfo> deletedTable =
om.getMetadataManager().getDeletedTable();
Table<String, OmKeyInfo> deletedDirTable =
om.getMetadataManager().getDeletedDirTable();
Table<String, String> renamedTable =
om.getMetadataManager().getSnapshotRenamedTable();
BucketArgs bucketArgs = new BucketArgs.Builder()
.setBucketLayout(BucketLayout.FILE_SYSTEM_OPTIMIZED)
.build();
OzoneBucket bucket2 = TestDataUtil.createBucket(
client, VOLUME_NAME, bucketArgs, BUCKET_NAME_FSO);
assertTableRowCount(snapshotInfoTable, 0);
assertTableRowCount(deletedDirTable, 0);
assertTableRowCount(deletedTable, 0);
om.getKeyManager().getDirDeletingService().suspend();
om.getKeyManager().getDeletingService().suspend();
// Create 10 keys
for (int i = 1; i <= 10; i++) {
TestDataUtil.createKey(bucket2, "key" + i, CONTENT.array());
}
// Create 5 keys to overwrite
for (int i = 11; i <= 15; i++) {
TestDataUtil.createKey(bucket2, "key" + i, CONTENT.array());
}
// Create Directory and Sub
for (int i = 1; i <= 3; i++) {
String parent = "parent" + i;
client.getProxy().createDirectory(VOLUME_NAME,
BUCKET_NAME_FSO, parent);
for (int j = 1; j <= 3; j++) {
String childFile = "/childFile" + j;
String childDir = "/childDir" + j;
client.getProxy().createDirectory(VOLUME_NAME,
BUCKET_NAME_FSO, parent + childDir);
TestDataUtil.createKey(bucket2, parent + childFile, CONTENT.array());
}
}
// Total 12 dirs, 19 keys.
assertTableRowCount(dirTable, 12);
assertTableRowCount(keyTable, 24);
assertTableRowCount(deletedDirTable, 0);
// Create Snapshot1
client.getObjectStore().createSnapshot(VOLUME_NAME, BUCKET_NAME_FSO,
"snap1");
assertTableRowCount(snapshotInfoTable, 1);
// Overwrite 3 keys -> Moves previous version to deletedTable
for (int i = 11; i <= 13; i++) {
TestDataUtil.createKey(bucket2, "key" + i, CONTENT.array());
}
assertTableRowCount(keyTable, 24);
// Delete 5 Keys
for (int i = 1; i <= 5; i++) {
client.getProxy().deleteKey(VOLUME_NAME, BUCKET_NAME_FSO,
"key" + i, false);
}
// Rename Keys 3 keys
for (int i = 6; i <= 8; i++) {
client.getProxy().renameKey(VOLUME_NAME, BUCKET_NAME_FSO, "key" + i,
"renamedKey" + i);
}
// Rename 1 Dir
for (int i = 1; i <= 1; i++) {
client.getProxy().renameKey(VOLUME_NAME, BUCKET_NAME_FSO, "/parent" + i,
"/renamedParent" + i);
}
// Delete 2 Dirs
for (int i = 2; i <= 3; i++) {
client.getProxy().deleteKey(VOLUME_NAME, BUCKET_NAME_FSO, "/parent" + i,
true);
}
assertTableRowCount(renamedTable, 4);
// Delete Renamed Keys
for (int i = 6; i <= 8; i++) {
client.getProxy().deleteKey(VOLUME_NAME, BUCKET_NAME_FSO,
"renamedKey" + i, false);
}
// Delete Renamed Dir
for (int i = 1; i <= 1; i++) {
client.getProxy().deleteKey(VOLUME_NAME, BUCKET_NAME_FSO,
"/renamedParent" + i, true);
}
assertTableRowCount(deletedTable, 11);
assertTableRowCount(deletedDirTable, 3);
assertTableRowCount(dirTable, 9);
assertTableRowCount(renamedTable, 4);
// Create Snapshot2
client.getObjectStore().createSnapshot(VOLUME_NAME, BUCKET_NAME_FSO,
"snap2");
assertTableRowCount(snapshotInfoTable, 2);
// Once snapshot is taken renamedTable, deletedTable, deletedDirTable
// should be cleaned
assertTableRowCount(renamedTable, 0);
assertTableRowCount(deletedTable, 0);
assertTableRowCount(deletedDirTable, 0);
// Delete 3 overwritten keys
for (int i = 11; i <= 13; i++) {
client.getProxy().deleteKey(VOLUME_NAME, BUCKET_NAME_FSO,
"key" + i, false);
}
// Overwrite 2 keys
for (int i = 14; i <= 15; i++) {
TestDataUtil.createKey(bucket2, "key" + i, CONTENT.array());
}
// Delete 2 more keys
for (int i = 9; i <= 10; i++) {
client.getProxy().deleteKey(VOLUME_NAME, BUCKET_NAME_FSO,
"key" + i, false);
}
assertTableRowCount(deletedTable, 7);
// Create Snapshot3
client.getObjectStore().createSnapshot(VOLUME_NAME, BUCKET_NAME_FSO,
"snap3");
assertTableRowCount(snapshotInfoTable, 3);
assertTableRowCount(renamedTable, 0);
assertTableRowCount(deletedDirTable, 0);
assertTableRowCount(deletedTable, 0);
assertTableRowCount(keyTable, 11);
SnapshotInfo deletedSnap = om.getMetadataManager()
.getSnapshotInfoTable().get("/vol1/bucketfso/snap2");
om.getKeyManager().getDirDeletingService().resume();
om.getKeyManager().getDeletingService().resume();
for (int i = 1; i <= 3; i++) {
String snapshotName = "snap" + i;
GenericTestUtils.waitFor(() -> {
try {
SnapshotInfo snap = om.getMetadataManager().getSnapshotInfo(VOLUME_NAME, BUCKET_NAME_FSO, snapshotName);
LOG.info("SnapshotInfo for {} is {}", snapshotName, snap.getSnapshotId());
return snap.isDeepCleaned() && snap.isDeepCleanedDeletedDir();
} catch (IOException e) {
throw new RuntimeException(e);
}
}, 2000, 100000);
}
om.getKeyManager().getDirDeletingService().suspend();
om.getKeyManager().getDeletingService().suspend();
UncheckedAutoCloseableSupplier<OmSnapshot> rcSnap2 =
getOmSnapshot(VOLUME_NAME, BUCKET_NAME_FSO, "snap2");
OmSnapshot snap2 = rcSnap2.get();
//Child directories should have moved to deleted Directory table to deleted directory table of snap2
assertTableRowCount(dirTable, 0);
assertTableRowCount(keyTable, 11);
assertTableRowCount(snap2.getMetadataManager().getDeletedDirTable(), 12);
assertTableRowCount(snap2.getMetadataManager().getDeletedTable(), 11);
client.getObjectStore().deleteSnapshot(VOLUME_NAME, BUCKET_NAME_FSO,
"snap2");
rcSnap2.close();
assertTableRowCount(snapshotInfoTable, 2);
// Delete 2 overwritten keys
for (int i = 14; i <= 15; i++) {
client.getProxy().deleteKey(VOLUME_NAME, BUCKET_NAME_FSO,
"key" + i, false);
}
assertTableRowCount(deletedTable, 2);
// Once all the tables are moved, the snapshot is deleted
assertTableRowCount(om.getMetadataManager().getSnapshotInfoTable(), 2);
verifySnapshotChain(deletedSnap, "/vol1/bucketfso/snap3");
UncheckedAutoCloseableSupplier<OmSnapshot> rcSnap3 = getOmSnapshot(VOLUME_NAME, BUCKET_NAME_FSO, "snap3");
OmSnapshot snap3 = rcSnap3.get();
Table<String, OmKeyInfo> snapDeletedDirTable =
snap3.getMetadataManager().getDeletedDirTable();
Table<String, String> snapRenamedTable =
snap3.getMetadataManager().getSnapshotRenamedTable();
Table<String, RepeatedOmKeyInfo> snapDeletedTable =
snap3.getMetadataManager().getDeletedTable();
assertTableRowCount(snapRenamedTable, 4);
assertTableRowCount(snapDeletedDirTable, 12);
// All the keys deleted before snapshot2 is moved to snap3
assertTableRowCount(snapDeletedTable, 18);
om.getKeyManager().getDirDeletingService().resume();
om.getKeyManager().getDeletingService().resume();
for (int snapshotIndex : new int[] {1, 3}) {
String snapshotName = "snap" + snapshotIndex;
GenericTestUtils.waitFor(() -> {
try {
SnapshotInfo snap = om.getMetadataManager().getSnapshotInfo(VOLUME_NAME, BUCKET_NAME_FSO, snapshotName);
return snap.isDeepCleaned() && snap.isDeepCleanedDeletedDir();
} catch (IOException e) {
throw new RuntimeException(e);
}
}, 2000, 100000);
}
om.getKeyManager().getDirDeletingService().suspend();
om.getKeyManager().getDeletingService().suspend();
assertTableRowCount(snapRenamedTable, 4);
assertTableRowCount(snapDeletedDirTable, 12);
// All the keys deleted before snapshot2 is moved to snap3
assertTableRowCount(snapDeletedTable, 15);
// Before deleting the last snapshot
assertTableRowCount(renamedTable, 0);
assertTableRowCount(deletedDirTable, 0);
assertTableRowCount(deletedTable, 2);
// Delete Snapshot3 and check entries moved to active DB
client.getObjectStore().deleteSnapshot(VOLUME_NAME, BUCKET_NAME_FSO,
"snap3");
rcSnap3.close();
om.getKeyManager().getDirDeletingService().resume();
om.getKeyManager().getDeletingService().resume();
// Check entries moved to active DB
assertTableRowCount(snapshotInfoTable, 1);
assertTableRowCount(renamedTable, 4);
assertTableRowCount(deletedDirTable, 12);
assertTableRowCount(deletedTable, 15);
UncheckedAutoCloseableSupplier<OmSnapshot> rcSnap1 = getOmSnapshot(VOLUME_NAME, BUCKET_NAME_FSO, "snap1");
OmSnapshot snap1 = rcSnap1.get();
Table<String, OmKeyInfo> snap1KeyTable =
snap1.getMetadataManager().getFileTable();
try (Table.KeyValueIterator<String, RepeatedOmKeyInfo> iterator = deletedTable.iterator()) {
while (iterator.hasNext()) {
Table.KeyValue<String, RepeatedOmKeyInfo> next = iterator.next();
String activeDBDeletedKey = next.getKey();
if (activeDBDeletedKey.matches(".*/key1/.*")) {
RepeatedOmKeyInfo activeDBDeleted = next.getValue();
OMMetadataManager metadataManager =
cluster.getOzoneManager().getMetadataManager();
assertEquals(1, activeDBDeleted.getOmKeyInfoList().size());
OmKeyInfo activeDbDeletedKeyInfo =
activeDBDeleted.getOmKeyInfoList().get(0);
long volumeId = metadataManager
.getVolumeId(activeDbDeletedKeyInfo.getVolumeName());
long bucketId = metadataManager
.getBucketId(activeDbDeletedKeyInfo.getVolumeName(),
activeDbDeletedKeyInfo.getBucketName());
String keyForSnap =
metadataManager.getOzonePathKey(volumeId, bucketId,
activeDbDeletedKeyInfo.getParentObjectID(),
activeDbDeletedKeyInfo.getKeyName());
OmKeyInfo snap1keyInfo = snap1KeyTable.get(keyForSnap);
assertEquals(activeDbDeletedKeyInfo.getLatestVersionLocations()
.getLocationList(), snap1keyInfo.getLatestVersionLocations()
.getLocationList());
}
}
}
assertTableRowCount(deletedTable, 15);
snap1 = null;
}
/*
 * Data flow exercised by createSnapshotDataForBucket:
 * ---------------------------------------------------
 * create key0
 * create key1
 * create snapshot1
 * create key0          (overwrite of key0)
 * create key2
 * delete key1
 * delete key2
 * delete key0
 * create snapshot2
 * create key3
 * create key4
 * delete key4
 * create snapshot3
 * delete snapshot2
 */
// Populates the given bucket with the key/snapshot sequence above and asserts
// the expected row counts in the key/deleted/snapshot-info tables at each step.
// Synchronized: relies on exclusive access to the shared OM tables while the
// deletion services are suspended.
private synchronized void createSnapshotDataForBucket(OzoneBucket bucket) throws Exception {
  Table<String, SnapshotInfo> snapshotInfoTable =
      om.getMetadataManager().getSnapshotInfoTable();
  Table<String, RepeatedOmKeyInfo> deletedTable =
      om.getMetadataManager().getDeletedTable();
  Table<String, OmKeyInfo> keyTable =
      om.getMetadataManager().getKeyTable(BucketLayout.DEFAULT);
  OmMetadataManagerImpl metadataManager = (OmMetadataManagerImpl)
      om.getMetadataManager();

  TestDataUtil.createKey(bucket, bucket.getName() + "key0", CONTENT.array());
  TestDataUtil.createKey(bucket, bucket.getName() + "key1", CONTENT.array());
  assertTableRowCount(keyTable, 2);

  // Create Snapshot 1.
  client.getProxy().createSnapshot(bucket.getVolumeName(), bucket.getName(),
      bucket.getName() + "snap1");
  assertTableRowCount(snapshotInfoTable, 1);

  // Overwrite bucket1key0. This creates a newer version of the key, which
  // should be reclaimed since it is a different version of the key.
  TestDataUtil.createKey(bucket, bucket.getName() + "key0", CONTENT.array());
  TestDataUtil.createKey(bucket, bucket.getName() + "key2", CONTENT.array());

  // Key 1 cannot be reclaimed as it is still referenced by Snapshot 1.
  client.getProxy().deleteKey(bucket.getVolumeName(), bucket.getName(),
      bucket.getName() + "key1", false);
  // Key 2 is deleted here, and will be reclaimed, as it is not referenced
  // by the previous snapshot.
  client.getProxy().deleteKey(bucket.getVolumeName(), bucket.getName(),
      bucket.getName() + "key2", false);
  client.getProxy().deleteKey(bucket.getVolumeName(), bucket.getName(),
      bucket.getName() + "key0", false);
  assertTableRowCount(keyTable, 0);
  // One copy of bucket1key0 should also be reclaimed (it is a distinct
  // version), but the original deleted key created during the overwrite must
  // not be deleted, so two entries remain.
  assertTableRowCount(deletedTable, 2);

  // Create Snapshot 2.
  client.getProxy().createSnapshot(bucket.getVolumeName(), bucket.getName(),
      bucket.getName() + "snap2");
  assertTableRowCount(snapshotInfoTable, 2);
  // Key 2 is removed from the active DB's
  // deletedTable when Snapshot 2 is taken.
  assertTableRowCount(deletedTable, 0);

  TestDataUtil.createKey(bucket, bucket.getName() + "key3", CONTENT.array());
  TestDataUtil.createKey(bucket, bucket.getName() + "key4", CONTENT.array());
  client.getProxy().deleteKey(bucket.getVolumeName(), bucket.getName(),
      bucket.getName() + "key4", false);
  assertTableRowCount(keyTable, 1);
  assertTableRowCount(deletedTable, 0);

  // Create Snapshot 3.
  client.getProxy().createSnapshot(bucket.getVolumeName(), bucket.getName(),
      bucket.getName() + "snap3");
  assertTableRowCount(snapshotInfoTable, 3);

  // Capture snap2's info before deleting it so the chain can be verified.
  SnapshotInfo snapshotInfo = metadataManager.getSnapshotInfoTable()
      .get(String.format("/%s/%s/%ssnap2", bucket.getVolumeName(), bucket.getName(), bucket.getName()));

  // Delete Snapshot 2: snap3 must inherit snap2's previous-snapshot pointers.
  client.getProxy().deleteSnapshot(bucket.getVolumeName(), bucket.getName(),
      bucket.getName() + "snap2");
  assertTableRowCount(snapshotInfoTable, 2);
  verifySnapshotChain(snapshotInfo, String.format("/%s/%s/%ssnap3", bucket.getVolumeName(), bucket.getName(),
      bucket.getName()));
}
// Builds a MockedConstruction that intercepts every ReclaimableKeyFilter the
// KeyDeletingService (KDS) constructs. Each mock delegates to the real
// {@code keyFilter} while coordinating with the SnapshotDeletingService (SDS)
// through the provided flags:
//   kdsWaitStarted     - set once KDS starts filtering a key in volume/bucket
//   sdsLockWaitStarted - observed: SDS has started waiting for the GC lock
//   sdsLockAcquired    - asserted still false when KDS closes its filter
//   kdsFinished        - set when KDS closes the filter (i.e. KDS is done)
// The ordering assertions in close() verify that SDS cannot acquire the
// snapshot GC lock while KDS is still running.
private MockedConstruction<ReclaimableKeyFilter> getMockedReclaimableKeyFilter(String volume, String bucket,
    AtomicBoolean kdsWaitStarted, AtomicBoolean sdsLockWaitStarted,
    AtomicBoolean sdsLockAcquired, AtomicBoolean kdsFinished, ReclaimableKeyFilter keyFilter) throws IOException {
  return mockConstruction(ReclaimableKeyFilter.class,
      (mocked, context) -> {
        when(mocked.apply(any())).thenAnswer(i -> {
          Table.KeyValue<String, OmKeyInfo> keyInfo = i.getArgument(0);
          // Keys outside the bucket under test pass straight through to the
          // real filter without any synchronization.
          if (!keyInfo.getValue().getVolumeName().equals(volume) ||
              !keyInfo.getValue().getBucketName().equals(bucket)) {
            return keyFilter.apply(i.getArgument(0));
          }
          keyFilter.apply(i.getArgument(0));
          // Notify SDS that KDS has started for the bucket.
          kdsWaitStarted.set(true);
          GenericTestUtils.waitFor(sdsLockWaitStarted::get, 1000, 10000);
          // Wait for 1 more second so that the command moves to lock wait.
          Thread.sleep(1000);
          return keyFilter.apply(i.getArgument(0));
        });
        doAnswer(i -> {
          // By the time KDS finishes, SDS must be waiting for (but must not
          // yet hold) the snapshot GC lock.
          assertTrue(sdsLockWaitStarted.get());
          assertFalse(sdsLockAcquired.get());
          kdsFinished.set(true);
          keyFilter.close();
          return null;
        }).when(mocked).close();
        // Size queries are answered by the real delegate filter.
        when(mocked.getExclusiveReplicatedSizeMap()).thenAnswer(i -> keyFilter.getExclusiveReplicatedSizeMap());
        when(mocked.getExclusiveSizeMap()).thenAnswer(i -> keyFilter.getExclusiveSizeMap());
      });
}
// Verifies that the SnapshotDeletingService (SDS) blocks on the snapshot GC
// lock until an in-flight KeyDeletingService (KDS) task completes.
//
// Parameters:
//   kdsRunningOnAOS    - true: KDS task runs against the active object store;
//                        false: an extra snapshot ("snap2") is created and the
//                        KDS task runs deep-cleaning against it.
//   snasphotDeleteIndex - which snapshot ("snap" + index) the test deletes
//                         while KDS is running.
//                         NOTE(review): parameter name is a typo for
//                         "snapshotDeleteIndex"; left unchanged here.
@ParameterizedTest
@CsvSource({"true, 0", "true, 1", "false, 0", "false, 1", "false, 2"})
@DisplayName("Tests Snapshot Deleting Service while KeyDeletingService is already running.")
@Order(4)
public void testSnapshotDeletingServiceWaitsForKeyDeletingService(boolean kdsRunningOnAOS,
    int snasphotDeleteIndex) throws Exception {
  SnapshotChainManager snapshotChainManager =
      ((OmMetadataManagerImpl)om.getMetadataManager()).getSnapshotChainManager();
  // Wait until every snapshot left over from earlier tests is ACTIVE.
  GenericTestUtils.waitFor(() -> {
    try {
      Iterator<UUID> itr = snapshotChainManager.iterator(false);
      while (itr.hasNext()) {
        SnapshotInfo snapshotInfo = SnapshotUtils.getSnapshotInfo(om, snapshotChainManager, itr.next());
        assertEquals(SnapshotInfo.SnapshotStatus.SNAPSHOT_ACTIVE, snapshotInfo.getSnapshotStatus());
      }
      return true;
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }, 1000, 30000);
  om.awaitDoubleBufferFlush();
  // Suspend the services first so the test controls all task execution.
  om.getKeyManager().getDirDeletingService().suspend();
  om.getKeyManager().getDeletingService().suspend();
  om.getKeyManager().getSnapshotDeletingService().suspend();
  String volume = "vol" + RandomStringUtils.secure().nextNumeric(3),
      bucket = "bucket" + RandomStringUtils.secure().nextNumeric(3);
  client.getObjectStore().createVolume(volume);
  OzoneVolume ozoneVolume = client.getObjectStore().getVolume(volume);
  ozoneVolume.createBucket(bucket);
  OzoneBucket ozoneBucket = ozoneVolume.getBucket(bucket);
  // Create snap0.
  // NOTE(review): the snapNId variables below are named one ahead of the
  // snapshot they read from (snap1Id holds snap0's id, etc.) — confirm intent.
  client.getObjectStore().createSnapshot(volume, bucket, "snap0");
  client.getObjectStore().getSnapshotInfo(volume, bucket, "snap0");
  UUID snap1Id = client.getObjectStore().getSnapshotInfo(volume, bucket, "snap0").getSnapshotId();
  // Create snap1 after writing one key.
  TestDataUtil.createKey(ozoneBucket, "key", CONTENT.array());
  client.getObjectStore().createSnapshot(volume, bucket, "snap1");
  UUID snap2Id = client.getObjectStore().getSnapshotInfo(volume, bucket, "snap1").getSnapshotId();
  ozoneBucket.renameKey("key", "renamedKey");
  ozoneBucket.deleteKey("renamedKey");
  om.awaitDoubleBufferFlush();
  UUID snap3Id;
  ReclaimableKeyFilter keyFilter;
  SnapshotInfo snapInfo;
  // Create snap2 to test snapshot deep cleaning; otherwise just run on AOS.
  if (kdsRunningOnAOS) {
    snap3Id = null;
    snapInfo = null;
    keyFilter = new ReclaimableKeyFilter(om, om.getOmSnapshotManager(),
        ((OmMetadataManagerImpl)om.getMetadataManager()).getSnapshotChainManager(),
        snapInfo, om.getKeyManager(), om.getMetadataManager().getLock());
  } else {
    client.getObjectStore().createSnapshot(volume, bucket, "snap2");
    snap3Id = client.getObjectStore().getSnapshotInfo(volume, bucket, "snap2").getSnapshotId();
    om.awaitDoubleBufferFlush();
    // Mark snap2's directory deep-clean done so only key deep-clean remains.
    SnapshotInfo snap = om.getMetadataManager().getSnapshotInfo(volume, bucket, "snap2");
    snap.setDeepCleanedDeletedDir(true);
    om.getMetadataManager().getSnapshotInfoTable().put(snap.getTableKey(), snap);
    assertTrue(om.getMetadataManager().getSnapshotInfo(volume, bucket, "snap2")
        .isDeepCleanedDeletedDir());
    snapInfo = SnapshotUtils.getSnapshotInfo(om, volume, bucket, "snap2");
    keyFilter = new ReclaimableKeyFilter(om, om.getOmSnapshotManager(),
        ((OmMetadataManagerImpl)om.getMetadataManager()).getSnapshotChainManager(),
        snapInfo, getOmSnapshot(volume, bucket, "snap2").get().getKeyManager(),
        om.getMetadataManager().getLock());
  }
  // Real lock instance the mocked MultiSnapshotLocks delegates to.
  MultiSnapshotLocks sdsMultiLocks = new MultiSnapshotLocks(cluster.getOzoneManager().getMetadataManager().getLock(),
      SNAPSHOT_GC_LOCK, true);
  AtomicBoolean kdsWaitStarted = new AtomicBoolean(false);
  AtomicBoolean kdsFinished = new AtomicBoolean(false);
  AtomicBoolean sdsLockWaitStarted = new AtomicBoolean(false);
  AtomicBoolean sdsLockAcquired = new AtomicBoolean(false);
  try (MockedConstruction<MultiSnapshotLocks> mockedMultiSnapshotLock = mockConstruction(MultiSnapshotLocks.class,
      (mocked, context) -> {
        when(mocked.acquireLock(anyList())).thenAnswer(i -> {
          List<UUID> ids = i.getArgument(0);
          // The SDS lock request for the deleted snapshot covers that
          // snapshot and its successor (nulls filtered out).
          List<UUID> expectedIds = Arrays.asList(snap1Id, snap2Id, snap3Id).subList(snasphotDeleteIndex, Math.min(3,
              snasphotDeleteIndex + 2)).stream().filter(Objects::nonNull).collect(Collectors.toList());
          if (expectedIds.equals(ids) && !sdsLockWaitStarted.get() && !sdsLockAcquired.get()) {
            sdsLockWaitStarted.set(true);
            OMLockDetails lockDetails = sdsMultiLocks.acquireLock(ids);
            // The lock must only be granted after KDS finished.
            assertTrue(kdsFinished::get);
            sdsLockAcquired.set(true);
            return lockDetails;
          }
          return sdsMultiLocks.acquireLock(ids);
        });
        doAnswer(i -> {
          sdsMultiLocks.releaseLock();
          return null;
        }).when(mocked).releaseLock();
      })) {
    // Build a KDS task but run it manually (service itself is shut down).
    KeyDeletingService kds = new KeyDeletingService(om, om.getScmClient().getBlockClient(), 500, 10000,
        om.getConfiguration(), 1, true);
    kds.shutdown();
    KeyDeletingService.KeyDeletingTask task = kds.new KeyDeletingTask(snap3Id);
    // Run the KDS task asynchronously; it blocks inside the mocked filter
    // until SDS starts waiting for the lock.
    CompletableFuture.supplyAsync(() -> {
      try (MockedConstruction<ReclaimableKeyFilter> mockedReclaimableFilter = getMockedReclaimableKeyFilter(
          volume, bucket, kdsWaitStarted, sdsLockWaitStarted, sdsLockAcquired, kdsFinished, keyFilter)) {
        return task.call();
      } catch (IOException e) {
        throw new RuntimeException(e);
      }
    });
    SnapshotDeletingService sds = new SnapshotDeletingService(500, 10000, om);
    sds.shutdown();
    GenericTestUtils.waitFor(kdsWaitStarted::get, 1000, 30000);
    // Delete the snapshot while KDS is mid-task, then drive SDS manually.
    client.getObjectStore().deleteSnapshot(volume, bucket, "snap" + snasphotDeleteIndex);
    sds.runPeriodicalTaskNow();
    om.awaitDoubleBufferFlush();
    if (snasphotDeleteIndex == 2) {
      // Deleting the last snapshot needs a second pass to be purged.
      sds.runPeriodicalTaskNow();
    }
    assertTrue(sdsLockWaitStarted.get());
    assertTrue(sdsLockAcquired.get());
    // The deleted snapshot must be gone from the snapshot-info table.
    assertThrows(IOException.class, () -> SnapshotUtils.getSnapshotInfo(om, volume, bucket,
        "snap" + snasphotDeleteIndex));
  }
}
// Waits for the deleted snapshot to disappear from the snapshot-info table,
// then (if a successor is given) waits for the successor to inherit the
// deleted snapshot's path/global previous-snapshot pointers.
private void verifySnapshotChain(SnapshotInfo deletedSnapshot,
    String nextSnapshot)
    throws Exception {
  OmMetadataManagerImpl omMetadata =
      (OmMetadataManagerImpl) om.getMetadataManager();
  UUID expectedPathPrevious = deletedSnapshot.getPathPreviousSnapshotId();
  UUID expectedGlobalPrevious =
      deletedSnapshot.getGlobalPreviousSnapshotId();

  // The deleted snapshot's row must eventually be purged.
  GenericTestUtils.waitFor(() -> {
    try {
      return omMetadata.getSnapshotInfoTable()
          .get(deletedSnapshot.getTableKey()) == null;
    } catch (IOException e) {
      LOG.error("Error getting snapInfo.");
      return false;
    }
  }, 100, 10000);

  if (nextSnapshot == null) {
    return;
  }
  SnapshotInfo successor = omMetadata
      .getSnapshotInfoTable().get(nextSnapshot);
  // The successor inherits both previous-snapshot pointers.
  GenericTestUtils.waitFor(
      () -> Objects.equals(successor.getPathPreviousSnapshotId(), expectedPathPrevious)
          && Objects.equals(successor.getGlobalPreviousSnapshotId(), expectedGlobalPrevious),
      100, 10000);
}
// Polls (once per second, for up to two minutes) until the table holds
// exactly {@code count} rows; times out otherwise.
private void assertTableRowCount(Table<String, ?> table, int count)
    throws TimeoutException, InterruptedException {
  GenericTestUtils.waitFor(
      () -> assertTableRowCount(count, table), 1000, 120000);
}
// Returns true iff the table currently contains exactly expectedCount rows.
// Counting errors are surfaced via assertDoesNotThrow; the observed count is
// logged for debugging flaky waits.
private boolean assertTableRowCount(int expectedCount,
    Table<String, ?> table) {
  AtomicLong actualCount = new AtomicLong();
  assertDoesNotThrow(() -> {
    long rows =
        cluster.getOzoneManager().getMetadataManager().countRowsInTable(table);
    actualCount.set(rows);
    LOG.info("{} actual row count={}, expectedCount={}", table.getName(),
        actualCount.get(), expectedCount);
  });
  return actualCount.get() == expectedCount;
}
}
|
google/guava | 35,193 | guava/src/com/google/common/io/MoreFiles.java | /*
* Copyright (C) 2013 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.io;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.collect.Iterables.getOnlyElement;
import static java.nio.file.LinkOption.NOFOLLOW_LINKS;
import static java.util.Objects.requireNonNull;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.annotations.J2ktIncompatible;
import com.google.common.base.Optional;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableList;
import com.google.common.graph.Traverser;
import com.google.j2objc.annotations.J2ObjCIncompatible;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.channels.Channels;
import java.nio.channels.SeekableByteChannel;
import java.nio.charset.Charset;
import java.nio.file.DirectoryIteratorException;
import java.nio.file.DirectoryStream;
import java.nio.file.FileAlreadyExistsException;
import java.nio.file.FileSystemException;
import java.nio.file.Files;
import java.nio.file.LinkOption;
import java.nio.file.NoSuchFileException;
import java.nio.file.NotDirectoryException;
import java.nio.file.OpenOption;
import java.nio.file.Path;
import java.nio.file.SecureDirectoryStream;
import java.nio.file.StandardOpenOption;
import java.nio.file.attribute.BasicFileAttributeView;
import java.nio.file.attribute.BasicFileAttributes;
import java.nio.file.attribute.FileAttribute;
import java.nio.file.attribute.FileTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.stream.Stream;
import org.jspecify.annotations.Nullable;
/**
* Static utilities for use with {@link Path} instances, intended to complement {@link Files}.
*
* <p>Many methods provided by Guava's {@code Files} class for {@link java.io.File} instances are
* now available via the JDK's {@link java.nio.file.Files} class for {@code Path} - check the JDK's
* class if a sibling method from {@code Files} appears to be missing from this class.
*
* @since 21.0 (but only since 33.4.0 in the Android flavor)
* @author Colin Decker
*/
@J2ktIncompatible
@GwtIncompatible
@J2ObjCIncompatible // java.nio.file
public final class MoreFiles {
private MoreFiles() {} // static utility class; never instantiated
/**
 * Returns a view of the given {@code path} as a {@link ByteSource}.
 *
 * <p>Any {@linkplain OpenOption open options} provided are used when opening streams to the file
 * and may affect the behavior of the returned source and the streams it provides. See {@link
 * StandardOpenOption} for the standard options that may be provided. Providing no options is
 * equivalent to providing the {@link StandardOpenOption#READ READ} option.
 */
public static ByteSource asByteSource(Path path, OpenOption... options) {
  // PathByteSource defensively copies the options array and derives
  // link-following behavior from the presence of NOFOLLOW_LINKS.
  return new PathByteSource(path, options);
}
/**
 * {@link ByteSource} view of a {@link Path}. Streams are opened with the
 * user-supplied {@link OpenOption}s; attribute reads honor the same
 * link-following behavior those options imply.
 */
private static final class PathByteSource extends ByteSource {
  // Empty array == JDK default link handling, i.e. follow symlinks.
  private static final LinkOption[] FOLLOW_LINKS = {};

  private final Path path;
  private final OpenOption[] options;
  // Cached: false iff NOFOLLOW_LINKS was among the provided options.
  private final boolean followLinks;

  private PathByteSource(Path path, OpenOption... options) {
    this.path = checkNotNull(path);
    this.options = options.clone(); // defensive copy; caller may reuse array
    this.followLinks = followLinks(this.options);
    // TODO(cgdecker): validate the provided options... for example, just WRITE seems wrong
  }

  // Returns false iff NOFOLLOW_LINKS appears in the options.
  private static boolean followLinks(OpenOption[] options) {
    for (OpenOption option : options) {
      if (option == NOFOLLOW_LINKS) {
        return false;
      }
    }
    return true;
  }

  @Override
  public InputStream openStream() throws IOException {
    return Files.newInputStream(path, options);
  }

  // Reads basic attributes with link handling consistent with the options.
  private BasicFileAttributes readAttributes() throws IOException {
    return Files.readAttributes(
        path,
        BasicFileAttributes.class,
        followLinks ? FOLLOW_LINKS : new LinkOption[] {NOFOLLOW_LINKS});
  }

  @Override
  public Optional<Long> sizeIfKnown() {
    BasicFileAttributes attrs;
    try {
      attrs = readAttributes();
    } catch (IOException e) {
      // Failed to get attributes; we don't know the size.
      return Optional.absent();
    }
    // Don't return a size for directories or symbolic links; their sizes are implementation
    // specific and they can't be read as bytes using the read methods anyway.
    if (attrs.isDirectory() || attrs.isSymbolicLink()) {
      return Optional.absent();
    }
    return Optional.of(attrs.size());
  }

  @Override
  public long size() throws IOException {
    BasicFileAttributes attrs = readAttributes();
    // Don't return a size for directories or symbolic links; their sizes are implementation
    // specific and they can't be read as bytes using the read methods anyway.
    if (attrs.isDirectory()) {
      throw new IOException("can't read: is a directory");
    } else if (attrs.isSymbolicLink()) {
      throw new IOException("can't read: is a symbolic link");
    }
    return attrs.size();
  }

  @Override
  public byte[] read() throws IOException {
    // Open a channel so channel.size() can presize the destination buffer.
    try (SeekableByteChannel channel = Files.newByteChannel(path, options)) {
      return ByteStreams.toByteArray(Channels.newInputStream(channel), channel.size());
    }
  }

  @Override
  public CharSource asCharSource(Charset charset) {
    if (options.length == 0) {
      // If no OpenOptions were passed, delegate to Files.lines, which could have performance
      // advantages. (If OpenOptions were passed we can't, because Files.lines doesn't have an
      // overload taking OpenOptions, meaning we can't guarantee the same behavior w.r.t. things
      // like following/not following symlinks.)
      return new AsCharSource(charset) {
        @SuppressWarnings("FilesLinesLeak") // the user needs to close it in this case
        @Override
        public Stream<String> lines() throws IOException {
          return Files.lines(path, charset);
        }
      };
    }
    return super.asCharSource(charset);
  }

  @Override
  public String toString() {
    return "MoreFiles.asByteSource(" + path + ", " + Arrays.toString(options) + ")";
  }
}
/**
 * Returns a view of the given {@code path} as a {@link ByteSink}.
 *
 * <p>Any {@linkplain OpenOption open options} provided are used when opening streams to the file
 * and may affect the behavior of the returned sink and the streams it provides. See {@link
 * StandardOpenOption} for the standard options that may be provided. Providing no options is
 * equivalent to providing the {@link StandardOpenOption#CREATE CREATE}, {@link
 * StandardOpenOption#TRUNCATE_EXISTING TRUNCATE_EXISTING} and {@link StandardOpenOption#WRITE
 * WRITE} options.
 */
public static ByteSink asByteSink(Path path, OpenOption... options) {
  // PathByteSink defensively copies the options array.
  return new PathByteSink(path, options);
}
/**
 * {@link ByteSink} view of a {@link Path}; output streams are opened with the
 * user-supplied {@link OpenOption}s.
 */
private static final class PathByteSink extends ByteSink {
  private final Path path;
  private final OpenOption[] options;

  private PathByteSink(Path path, OpenOption... options) {
    this.path = checkNotNull(path);
    // Defensive copy: the caller may mutate its array after this call.
    this.options = options.clone();
    // TODO(cgdecker): validate the provided options... for example, just READ seems wrong
  }

  @Override
  public OutputStream openStream() throws IOException {
    return Files.newOutputStream(path, options);
  }

  @Override
  public String toString() {
    StringBuilder description = new StringBuilder("MoreFiles.asByteSink(");
    description.append(path).append(", ").append(Arrays.toString(options)).append(")");
    return description.toString();
  }
}
/**
 * Returns a view of the given {@code path} as a {@link CharSource} using the given {@code
 * charset}.
 *
 * <p>Any {@linkplain OpenOption open options} provided are used when opening streams to the file
 * and may affect the behavior of the returned source and the streams it provides. See {@link
 * StandardOpenOption} for the standard options that may be provided. Providing no options is
 * equivalent to providing the {@link StandardOpenOption#READ READ} option.
 */
public static CharSource asCharSource(Path path, Charset charset, OpenOption... options) {
  // Char decoding is layered on top of the byte source for the same options.
  return asByteSource(path, options).asCharSource(charset);
}
/**
 * Returns a view of the given {@code path} as a {@link CharSink} using the given {@code charset}.
 *
 * <p>Any {@linkplain OpenOption open options} provided are used when opening streams to the file
 * and may affect the behavior of the returned sink and the streams it provides. See {@link
 * StandardOpenOption} for the standard options that may be provided. Providing no options is
 * equivalent to providing the {@link StandardOpenOption#CREATE CREATE}, {@link
 * StandardOpenOption#TRUNCATE_EXISTING TRUNCATE_EXISTING} and {@link StandardOpenOption#WRITE
 * WRITE} options.
 */
public static CharSink asCharSink(Path path, Charset charset, OpenOption... options) {
  // Char encoding is layered on top of the byte sink for the same options.
  return asByteSink(path, options).asCharSink(charset);
}
/**
 * Returns an immutable list of the entries of the given directory.
 *
 * @throws NoSuchFileException if the file does not exist <i>(optional specific exception)</i>
 * @throws NotDirectoryException if the file could not be opened because it is not a directory
 *     <i>(optional specific exception)</i>
 * @throws IOException if an I/O error occurs
 */
public static ImmutableList<Path> listFiles(Path dir) throws IOException {
  try (DirectoryStream<Path> entries = Files.newDirectoryStream(dir)) {
    return ImmutableList.copyOf(entries);
  } catch (DirectoryIteratorException e) {
    // copyOf iterates the stream; unwrap the IOException the iterator wrapped.
    throw e.getCause();
  }
}
/**
 * Returns a {@link Traverser} instance for the file and directory tree. The returned traverser
 * starts from a {@link Path} and will return all files and directories it encounters.
 *
 * <p>The returned traverser attempts to avoid following symbolic links to directories. However,
 * the traverser cannot guarantee that it will not follow symbolic links to directories as it is
 * possible for a directory to be replaced with a symbolic link between checking if the file is a
 * directory and actually reading the contents of that directory.
 *
 * <p>If the {@link Path} passed to one of the traversal methods does not exist or is not a
 * directory, no exception will be thrown and the returned {@link Iterable} will contain a single
 * element: that path.
 *
 * <p>{@link DirectoryIteratorException} may be thrown when iterating {@link Iterable} instances
 * created by this traverser if an {@link IOException} is thrown by a call to {@link
 * #listFiles(Path)}.
 *
 * <p>Example: {@code MoreFiles.fileTraverser().depthFirstPreOrder(Paths.get("/"))} may return the
 * following paths: {@code ["/", "/etc", "/etc/config.txt", "/etc/fonts", "/home", "/home/alice",
 * ...]}
 *
 * @since 23.5
 */
public static Traverser<Path> fileTraverser() {
  // fileTreeChildren supplies each node's children (empty for non-directories).
  return Traverser.forTree(MoreFiles::fileTreeChildren);
}
// Children supplier for fileTraverser(): directory entries for directories
// (symlinks intentionally not followed), an empty list for everything else.
private static Iterable<Path> fileTreeChildren(Path dir) {
  if (!Files.isDirectory(dir, NOFOLLOW_LINKS)) {
    return ImmutableList.of();
  }
  try {
    return listFiles(dir);
  } catch (IOException e) {
    // Match DirectoryStream iteration semantics: surface I/O errors as
    // DirectoryIteratorException during traversal.
    throw new DirectoryIteratorException(e);
  }
}
/**
 * Returns a predicate that returns the result of {@link java.nio.file.Files#isDirectory(Path,
 * LinkOption...)} on input paths with the given link options.
 */
public static Predicate<Path> isDirectory(LinkOption... options) {
  // Snapshot the options so later caller mutation cannot change the predicate.
  LinkOption[] copiedOptions = options.clone();
  return new Predicate<Path>() {
    @Override
    public boolean apply(Path input) {
      return Files.isDirectory(input, copiedOptions);
    }

    @Override
    public String toString() {
      return "MoreFiles.isDirectory(" + Arrays.toString(copiedOptions) + ")";
    }
  };
}
/** Returns whether or not the file with the given name in the given dir is a directory. */
private static boolean isDirectory(
    SecureDirectoryStream<Path> dir, Path name, LinkOption... options) throws IOException {
  // Resolve the attribute view relative to the (already open) directory
  // stream so the check is race-free w.r.t. the directory itself.
  BasicFileAttributeView view =
      dir.getFileAttributeView(name, BasicFileAttributeView.class, options);
  return view.readAttributes().isDirectory();
}
/**
 * Returns a predicate that returns the result of {@link java.nio.file.Files#isRegularFile(Path,
 * LinkOption...)} on input paths with the given link options.
 */
public static Predicate<Path> isRegularFile(LinkOption... options) {
  // Snapshot the options so later caller mutation cannot change the predicate.
  LinkOption[] copiedOptions = options.clone();
  return new Predicate<Path>() {
    @Override
    public boolean apply(Path input) {
      return Files.isRegularFile(input, copiedOptions);
    }

    @Override
    public String toString() {
      return "MoreFiles.isRegularFile(" + Arrays.toString(copiedOptions) + ")";
    }
  };
}
/**
 * Returns true if the files located by the given paths exist, are not directories, and contain
 * the same bytes.
 *
 * @throws IOException if an I/O error occurs
 * @since 22.0
 */
public static boolean equal(Path path1, Path path2) throws IOException {
  checkNotNull(path1);
  checkNotNull(path2);
  if (Files.isSameFile(path1, path2)) {
    return true;
  }
  ByteSource source1 = asByteSource(path1);
  ByteSource source2 = asByteSource(path2);
  long size1 = source1.sizeIfKnown().or(0L);
  long size2 = source2.sizeIfKnown().or(0L);
  /*
   * Zero can mean "length unknown" for system-dependent entities such as
   * devices or pipes, so only two known, nonzero, differing sizes are proof
   * that the contents differ; otherwise compare bytes directly.
   */
  boolean bothSizesKnown = size1 != 0 && size2 != 0;
  if (bothSizesKnown && size1 != size2) {
    return false;
  }
  return source1.contentEquals(source2);
}
/**
 * Like the unix command of the same name, creates an empty file or updates the last modified
 * timestamp of the existing file at the given path to the current system time.
 */
@SuppressWarnings("GoodTime") // reading system time without TimeSource
public static void touch(Path path) throws IOException {
  requireNonNull(path);
  FileTime now = FileTime.fromMillis(System.currentTimeMillis());
  try {
    Files.setLastModifiedTime(path, now);
  } catch (NoSuchFileException e) {
    // No such file: create it instead of updating its timestamp.
    try {
      Files.createFile(path);
    } catch (FileAlreadyExistsException ignore) {
      // The file didn't exist when we called setLastModifiedTime, but it did when we called
      // createFile, so something else created the file in between. The end result is
      // what we wanted: a new file that probably has its last modified time set to approximately
      // now. Or it could have an arbitrary last modified time set by the creator, but that's no
      // different than if another process set its last modified time to something else after we
      // created it here.
    }
  }
}
/**
 * Creates any necessary but nonexistent parent directories of the specified path. Note that if
 * this operation fails, it may have succeeded in creating some (but not all) of the necessary
 * parent directories. The parent directory is created with the given {@code attrs}.
 *
 * @throws IOException if an I/O error occurs, or if any necessary but nonexistent parent
 *     directories of the specified file could not be created.
 */
public static void createParentDirectories(Path path, FileAttribute<?>... attrs)
    throws IOException {
  // Unlike File.getCanonicalFile(), Path/Files has no way of canonicalizing a
  // path to a nonexistent file, so normalize the absolute form instead.
  Path normalized = path.toAbsolutePath().normalize();
  Path parent = normalized.getParent();
  if (parent == null) {
    // The path is a filesystem root: it has zero ancestors, all of which
    // trivially "exist". No guarantee is made that the root itself exists
    // (consider x:\ on a Windows machine without such a drive).
    return;
  }
  if (Files.isDirectory(parent)) {
    // Already a directory — also covers a symlink to a directory, which
    // createDirectories would reject, so don't call it in that case.
    return;
  }
  Files.createDirectories(parent, attrs);
  if (!Files.isDirectory(parent)) {
    throw new IOException("Unable to create parent directories of " + path);
  }
}
/**
 * Returns the <a href="http://en.wikipedia.org/wiki/Filename_extension">file extension</a> for
 * the file at the given path, or the empty string if the file has no extension. The result does
 * not include the '{@code .}'.
 *
 * <p><b>Note:</b> This method simply returns everything after the last '{@code .}' in the file's
 * name as determined by {@link Path#getFileName}. It does not account for any filesystem-specific
 * behavior that the {@link Path} API does not already account for. For example, on NTFS it will
 * report {@code "txt"} as the extension for the filename {@code "foo.exe:.txt"} even though NTFS
 * will drop the {@code ":.txt"} part of the name when the file is actually created on the
 * filesystem due to NTFS's <a
 * href="https://learn.microsoft.com/en-us/archive/blogs/askcore/alternate-data-streams-in-ntfs">Alternate
 * Data Streams</a>.
 */
public static String getFileExtension(Path path) {
  Path fileNamePath = path.getFileName();
  if (fileNamePath == null) {
    // Empty paths and root-only paths have no file name, hence no extension.
    return "";
  }
  String name = fileNamePath.toString();
  int lastDot = name.lastIndexOf('.');
  return lastDot < 0 ? "" : name.substring(lastDot + 1);
}
/**
 * Returns the file name without its <a
 * href="http://en.wikipedia.org/wiki/Filename_extension">file extension</a> or path. This is
 * similar to the {@code basename} unix command. The result does not include the '{@code .}'.
 */
public static String getNameWithoutExtension(Path path) {
  Path fileNamePath = path.getFileName();
  if (fileNamePath == null) {
    // Empty paths and root-only paths have no file name.
    return "";
  }
  String name = fileNamePath.toString();
  int lastDot = name.lastIndexOf('.');
  return lastDot < 0 ? name : name.substring(0, lastDot);
}
  /**
   * Deletes the file or directory at the given {@code path} recursively. Deletes symbolic links,
   * not their targets (subject to the caveat below).
   *
   * <p>If an I/O exception occurs attempting to read, open or delete any file under the given
   * directory, this method skips that file and continues. All such exceptions are collected and,
   * after attempting to delete all files, an {@code IOException} is thrown containing those
   * exceptions as {@linkplain Throwable#getSuppressed() suppressed exceptions}.
   *
   * <h2>Warning: Security of recursive deletes</h2>
   *
   * <p>On a file system that supports symbolic links and does <i>not</i> support {@link
   * SecureDirectoryStream}, it is possible for a recursive delete to delete files and directories
   * that are <i>outside</i> the directory being deleted. This can happen if, after checking that a
   * file is a directory (and not a symbolic link), that directory is replaced by a symbolic link to
   * an outside directory before the call that opens the directory to read its entries.
   *
   * <p>By default, this method throws {@link InsecureRecursiveDeleteException} if it can't
   * guarantee the security of recursive deletes. If you wish to allow the recursive deletes anyway,
   * pass {@link RecursiveDeleteOption#ALLOW_INSECURE} to this method to override that behavior.
   *
   * @throws NoSuchFileException if {@code path} does not exist <i>(optional specific exception)</i>
   * @throws InsecureRecursiveDeleteException if the security of recursive deletes can't be
   *     guaranteed for the file system and {@link RecursiveDeleteOption#ALLOW_INSECURE} was not
   *     specified
   * @throws IOException if {@code path} or any file in the subtree rooted at it can't be deleted
   *     for any reason
   */
  public static void deleteRecursively(Path path, RecursiveDeleteOption... options)
      throws IOException {
    // A parent handle is required so the target can be deleted through the parent's
    // SecureDirectoryStream; roots and working-directory paths have none.
    Path parentPath = getParentPath(path);
    if (parentPath == null) {
      throw new FileSystemException(path.toString(), null, "can't delete recursively");
    }
    Collection<IOException> exceptions = null; // created lazily if needed
    try {
      boolean sdsSupported = false;
      try (DirectoryStream<Path> parent = Files.newDirectoryStream(parentPath)) {
        if (parent instanceof SecureDirectoryStream) {
          // Secure path: delete relative to the open parent directory so a concurrent
          // symlink swap cannot redirect the delete outside the tree.
          sdsSupported = true;
          exceptions =
              deleteRecursivelySecure(
                  (SecureDirectoryStream<Path>) parent,
                  /*
                   * requireNonNull is safe because paths have file names when they have parents,
                   * and we checked for a parent at the beginning of the method.
                   */
                  requireNonNull(path.getFileName()));
        }
      }
      if (!sdsSupported) {
        // Insecure fallback: only proceed if the caller opted in via ALLOW_INSECURE.
        checkAllowsInsecure(path, options);
        exceptions = deleteRecursivelyInsecure(path);
      }
    } catch (IOException e) {
      // If we already accumulated per-file failures, fold this one in so it is
      // reported with the rest; otherwise propagate it directly.
      if (exceptions == null) {
        throw e;
      } else {
        exceptions.add(e);
      }
    }
    if (exceptions != null) {
      throwDeleteFailed(path, exceptions);
    }
  }
  /**
   * Deletes all files within the directory at the given {@code path} {@linkplain #deleteRecursively
   * recursively}. Does not delete the directory itself. Deletes symbolic links, not their targets
   * (subject to the caveat below). If {@code path} itself is a symbolic link to a directory, that
   * link is followed and the contents of the directory it targets are deleted.
   *
   * <p>If an I/O exception occurs attempting to read, open or delete any file under the given
   * directory, this method skips that file and continues. All such exceptions are collected and,
   * after attempting to delete all files, an {@code IOException} is thrown containing those
   * exceptions as {@linkplain Throwable#getSuppressed() suppressed exceptions}.
   *
   * <h2>Warning: Security of recursive deletes</h2>
   *
   * <p>On a file system that supports symbolic links and does <i>not</i> support {@link
   * SecureDirectoryStream}, it is possible for a recursive delete to delete files and directories
   * that are <i>outside</i> the directory being deleted. This can happen if, after checking that a
   * file is a directory (and not a symbolic link), that directory is replaced by a symbolic link to
   * an outside directory before the call that opens the directory to read its entries.
   *
   * <p>By default, this method throws {@link InsecureRecursiveDeleteException} if it can't
   * guarantee the security of recursive deletes. If you wish to allow the recursive deletes anyway,
   * pass {@link RecursiveDeleteOption#ALLOW_INSECURE} to this method to override that behavior.
   *
   * @throws NoSuchFileException if {@code path} does not exist <i>(optional specific exception)</i>
   * @throws NotDirectoryException if the file at {@code path} is not a directory <i>(optional
   *     specific exception)</i>
   * @throws InsecureRecursiveDeleteException if the security of recursive deletes can't be
   *     guaranteed for the file system and {@link RecursiveDeleteOption#ALLOW_INSECURE} was not
   *     specified
   * @throws IOException if one or more files can't be deleted for any reason
   */
  public static void deleteDirectoryContents(Path path, RecursiveDeleteOption... options)
      throws IOException {
    Collection<IOException> exceptions = null; // created lazily if needed
    try (DirectoryStream<Path> stream = Files.newDirectoryStream(path)) {
      if (stream instanceof SecureDirectoryStream) {
        // Secure variant deletes entries relative to the open directory handle.
        SecureDirectoryStream<Path> sds = (SecureDirectoryStream<Path>) stream;
        exceptions = deleteDirectoryContentsSecure(sds);
      } else {
        // Insecure fallback: only proceed if the caller opted in via ALLOW_INSECURE.
        checkAllowsInsecure(path, options);
        exceptions = deleteDirectoryContentsInsecure(stream);
      }
    } catch (IOException e) {
      // Fold into accumulated failures if any; otherwise propagate directly
      // (e.g. the initial newDirectoryStream failing).
      if (exceptions == null) {
        throw e;
      } else {
        exceptions.add(e);
      }
    }
    if (exceptions != null) {
      throwDeleteFailed(path, exceptions);
    }
  }
  /**
   * Secure recursive delete using {@code SecureDirectoryStream}. Returns a collection of exceptions
   * that occurred or null if no exceptions were thrown.
   */
  private static @Nullable Collection<IOException> deleteRecursivelySecure(
      SecureDirectoryStream<Path> dir, Path path) {
    Collection<IOException> exceptions = null;
    try {
      if (isDirectory(dir, path, NOFOLLOW_LINKS)) {
        // Open the child relative to the already-open parent with NOFOLLOW_LINKS so a
        // symlink swapped in between the check and the open cannot be followed.
        try (SecureDirectoryStream<Path> childDir = dir.newDirectoryStream(path, NOFOLLOW_LINKS)) {
          exceptions = deleteDirectoryContentsSecure(childDir);
        }
        // If exceptions is not null, something went wrong trying to delete the contents of the
        // directory, so we shouldn't try to delete the directory as it will probably fail.
        if (exceptions == null) {
          dir.deleteDirectory(path);
        }
      } else {
        // Non-directory entry (regular file or symlink): delete it directly.
        dir.deleteFile(path);
      }
      return exceptions;
    } catch (IOException e) {
      return addException(exceptions, e);
    }
  }
/**
* Secure method for deleting the contents of a directory using {@code SecureDirectoryStream}.
* Returns a collection of exceptions that occurred or null if no exceptions were thrown.
*/
private static @Nullable Collection<IOException> deleteDirectoryContentsSecure(
SecureDirectoryStream<Path> dir) {
Collection<IOException> exceptions = null;
try {
for (Path path : dir) {
exceptions = concat(exceptions, deleteRecursivelySecure(dir, path.getFileName()));
}
return exceptions;
} catch (DirectoryIteratorException e) {
return addException(exceptions, e.getCause());
}
}
/**
* Insecure recursive delete for file systems that don't support {@code SecureDirectoryStream}.
* Returns a collection of exceptions that occurred or null if no exceptions were thrown.
*/
private static @Nullable Collection<IOException> deleteRecursivelyInsecure(Path path) {
Collection<IOException> exceptions = null;
try {
if (Files.isDirectory(path, NOFOLLOW_LINKS)) {
try (DirectoryStream<Path> stream = Files.newDirectoryStream(path)) {
exceptions = deleteDirectoryContentsInsecure(stream);
}
}
// If exceptions is not null, something went wrong trying to delete the contents of the
// directory, so we shouldn't try to delete the directory as it will probably fail.
if (exceptions == null) {
Files.delete(path);
}
return exceptions;
} catch (IOException e) {
return addException(exceptions, e);
}
}
/**
* Simple, insecure method for deleting the contents of a directory for file systems that don't
* support {@code SecureDirectoryStream}. Returns a collection of exceptions that occurred or null
* if no exceptions were thrown.
*/
private static @Nullable Collection<IOException> deleteDirectoryContentsInsecure(
DirectoryStream<Path> dir) {
Collection<IOException> exceptions = null;
try {
for (Path entry : dir) {
exceptions = concat(exceptions, deleteRecursivelyInsecure(entry));
}
return exceptions;
} catch (DirectoryIteratorException e) {
return addException(exceptions, e.getCause());
}
}
/**
* Returns a path to the parent directory of the given path. If the path actually has a parent
* path, this is simple. Otherwise, we need to do some trickier things. Returns null if the path
* is a root or is the empty path.
*/
private static @Nullable Path getParentPath(Path path) {
Path parent = path.getParent();
// Paths that have a parent:
if (parent != null) {
// "/foo" ("/")
// "foo/bar" ("foo")
// "C:\foo" ("C:\")
// "\foo" ("\" - current drive for process on Windows)
// "C:foo" ("C:" - working dir of drive C on Windows)
return parent;
}
// Paths that don't have a parent:
if (path.getNameCount() == 0) {
// "/", "C:\", "\" (no parent)
// "" (undefined, though typically parent of working dir)
// "C:" (parent of working dir of drive C on Windows)
//
// For working dir paths ("" and "C:"), return null because:
// A) it's not specified that "" is the path to the working directory.
// B) if we're getting this path for recursive delete, it's typically not possible to
// delete the working dir with a relative path anyway, so it's ok to fail.
// C) if we're getting it for opening a new SecureDirectoryStream, there's no need to get
// the parent path anyway since we can safely open a DirectoryStream to the path without
// worrying about a symlink.
return null;
} else {
// "foo" (working dir)
return path.getFileSystem().getPath(".");
}
}
/** Checks that the given options allow an insecure delete, throwing an exception if not. */
private static void checkAllowsInsecure(Path path, RecursiveDeleteOption[] options)
throws InsecureRecursiveDeleteException {
if (!Arrays.asList(options).contains(RecursiveDeleteOption.ALLOW_INSECURE)) {
throw new InsecureRecursiveDeleteException(path.toString());
}
}
/**
* Adds the given exception to the given collection, creating the collection if it's null. Returns
* the collection.
*/
private static Collection<IOException> addException(
@Nullable Collection<IOException> exceptions, IOException e) {
if (exceptions == null) {
exceptions = new ArrayList<>(); // don't need Set semantics
}
exceptions.add(e);
return exceptions;
}
/**
* Concatenates the contents of the two given collections of exceptions. If either collection is
* null, the other collection is returned. Otherwise, the elements of {@code other} are added to
* {@code exceptions} and {@code exceptions} is returned.
*/
private static @Nullable Collection<IOException> concat(
@Nullable Collection<IOException> exceptions, @Nullable Collection<IOException> other) {
if (exceptions == null) {
return other;
} else if (other != null) {
exceptions.addAll(other);
}
return exceptions;
}
/**
* Throws an exception indicating that one or more files couldn't be deleted when deleting {@code
* path} or its contents.
*
* <p>If there is only one exception in the collection, and it is a {@link NoSuchFileException}
* thrown because {@code path} itself didn't exist, then throws that exception. Otherwise, the
* thrown exception contains all the exceptions in the given collection as suppressed exceptions.
*/
private static void throwDeleteFailed(Path path, Collection<IOException> exceptions)
throws FileSystemException {
NoSuchFileException pathNotFound = pathNotFound(path, exceptions);
if (pathNotFound != null) {
throw pathNotFound;
}
// TODO(cgdecker): Should there be a custom exception type for this?
// Also, should we try to include the Path of each file we may have failed to delete rather
// than just the exceptions that occurred?
FileSystemException deleteFailed =
new FileSystemException(
path.toString(),
null,
"failed to delete one or more files; see suppressed exceptions for details");
for (IOException e : exceptions) {
deleteFailed.addSuppressed(e);
}
throw deleteFailed;
}
private static @Nullable NoSuchFileException pathNotFound(
Path path, Collection<IOException> exceptions) {
if (exceptions.size() != 1) {
return null;
}
IOException exception = getOnlyElement(exceptions);
if (!(exception instanceof NoSuchFileException)) {
return null;
}
NoSuchFileException noSuchFileException = (NoSuchFileException) exception;
String exceptionFile = noSuchFileException.getFile();
if (exceptionFile == null) {
/*
* It's not clear whether this happens in practice, especially with the filesystem
* implementations that are built into java.nio.
*/
return null;
}
Path parentPath = getParentPath(path);
if (parentPath == null) {
/*
* This is probably impossible:
*
* - In deleteRecursively, we require the path argument to have a parent.
*
* - In deleteDirectoryContents, the path argument may have no parent. Fortunately, all the
* *other* paths we process will be descendants of that. That leaves only the original path
* argument for us to consider. And the only place we call pathNotFound is from
* throwDeleteFailed, and the other place that we call throwDeleteFailed inside
* deleteDirectoryContents is when an exception is thrown during the recursive steps. Any
* failure during the initial lookup of the path argument itself is rethrown directly. So
* any exception that we're seeing here is from a descendant, which naturally has a parent.
* I think.
*
* Still, if this can happen somehow (a weird filesystem implementation that lets callers
* change its working directly concurrently with a call to deleteDirectoryContents?), it makes
* more sense for us to fall back to a generic FileSystemException (by returning null here)
* than to dereference parentPath and end up producing NullPointerException.
*/
return null;
}
// requireNonNull is safe because paths have file names when they have parents.
Path pathResolvedFromParent = parentPath.resolve(requireNonNull(path.getFileName()));
if (exceptionFile.equals(pathResolvedFromParent.toString())) {
return noSuchFileException;
}
return null;
}
}
|
googleapis/google-cloud-java | 34,897 | java-datacatalog/proto-google-cloud-datacatalog-v1/src/main/java/com/google/cloud/datacatalog/v1/ListEntriesResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/datacatalog/v1/datacatalog.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.datacatalog.v1;
/**
*
*
* <pre>
* Response message for
* [ListEntries][google.cloud.datacatalog.v1.DataCatalog.ListEntries].
* </pre>
*
* Protobuf type {@code google.cloud.datacatalog.v1.ListEntriesResponse}
*/
public final class ListEntriesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.datacatalog.v1.ListEntriesResponse)
ListEntriesResponseOrBuilder {
  // NOTE(review): protoc-generated code (see header); regenerate from
  // google/cloud/datacatalog/v1/datacatalog.proto rather than hand-editing.
  private static final long serialVersionUID = 0L;
  // Use ListEntriesResponse.newBuilder() to construct.
  private ListEntriesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor used for the default instance: empty entries, empty token.
  private ListEntriesResponse() {
    entries_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListEntriesResponse();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.datacatalog.v1.Datacatalog
        .internal_static_google_cloud_datacatalog_v1_ListEntriesResponse_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.datacatalog.v1.Datacatalog
        .internal_static_google_cloud_datacatalog_v1_ListEntriesResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.datacatalog.v1.ListEntriesResponse.class,
            com.google.cloud.datacatalog.v1.ListEntriesResponse.Builder.class);
  }
  // NOTE(review): protoc-generated accessors for the repeated `entries` field.
  public static final int ENTRIES_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.datacatalog.v1.Entry> entries_;
  /**
   *
   *
   * <pre>
   * Entry details.
   * </pre>
   *
   * <code>repeated .google.cloud.datacatalog.v1.Entry entries = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.datacatalog.v1.Entry> getEntriesList() {
    return entries_;
  }
  /**
   *
   *
   * <pre>
   * Entry details.
   * </pre>
   *
   * <code>repeated .google.cloud.datacatalog.v1.Entry entries = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.datacatalog.v1.EntryOrBuilder>
      getEntriesOrBuilderList() {
    return entries_;
  }
  /**
   *
   *
   * <pre>
   * Entry details.
   * </pre>
   *
   * <code>repeated .google.cloud.datacatalog.v1.Entry entries = 1;</code>
   */
  @java.lang.Override
  public int getEntriesCount() {
    return entries_.size();
  }
  /**
   *
   *
   * <pre>
   * Entry details.
   * </pre>
   *
   * <code>repeated .google.cloud.datacatalog.v1.Entry entries = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.datacatalog.v1.Entry getEntries(int index) {
    return entries_.get(index);
  }
  /**
   *
   *
   * <pre>
   * Entry details.
   * </pre>
   *
   * <code>repeated .google.cloud.datacatalog.v1.Entry entries = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.datacatalog.v1.EntryOrBuilder getEntriesOrBuilder(int index) {
    return entries_.get(index);
  }
  // NOTE(review): protoc-generated accessors for `next_page_token`. The field holds
  // either a String or a ByteString and lazily caches the converted form.
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";
  /**
   *
   *
   * <pre>
   * Pagination token of the next results page. Empty if there are no more items
   * in results.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Lazily decode the UTF-8 ByteString and cache the String in its place.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Pagination token of the next results page. Empty if there are no more items
   * in results.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      // Lazily encode the String as UTF-8 and cache the ByteString in its place.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // NOTE(review): protoc-generated. No required fields in this proto3 message, so
  // initialization always succeeds; the result is memoized (-1 = not yet computed).
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // NOTE(review): protoc-generated wire serialization. Field 1 = entries (repeated
  // message), field 2 = next_page_token (string, omitted when empty).
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < entries_.size(); i++) {
      output.writeMessage(1, entries_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Memoized: -1 means not yet computed.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < entries_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, entries_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // NOTE(review): protoc-generated value equality over all fields plus unknown fields;
  // hashCode is memoized (0 = not yet computed).
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.datacatalog.v1.ListEntriesResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.datacatalog.v1.ListEntriesResponse other =
        (com.google.cloud.datacatalog.v1.ListEntriesResponse) obj;
    if (!getEntriesList().equals(other.getEntriesList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getEntriesCount() > 0) {
      hash = (37 * hash) + ENTRIES_FIELD_NUMBER;
      hash = (53 * hash) + getEntriesList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // NOTE(review): protoc-generated parse entry points. All overloads delegate to the
  // static PARSER (declared later in this class) or to GeneratedMessageV3 helpers.
  public static com.google.cloud.datacatalog.v1.ListEntriesResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.datacatalog.v1.ListEntriesResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.datacatalog.v1.ListEntriesResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.datacatalog.v1.ListEntriesResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.datacatalog.v1.ListEntriesResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.datacatalog.v1.ListEntriesResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.datacatalog.v1.ListEntriesResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.datacatalog.v1.ListEntriesResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.datacatalog.v1.ListEntriesResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.datacatalog.v1.ListEntriesResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.datacatalog.v1.ListEntriesResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.datacatalog.v1.ListEntriesResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // NOTE(review): protoc-generated builder factories.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.cloud.datacatalog.v1.ListEntriesResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields a fresh empty builder; otherwise seed from this message.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Response message for
* [ListEntries][google.cloud.datacatalog.v1.DataCatalog.ListEntries].
* </pre>
*
* Protobuf type {@code google.cloud.datacatalog.v1.ListEntriesResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.datacatalog.v1.ListEntriesResponse)
com.google.cloud.datacatalog.v1.ListEntriesResponseOrBuilder {
    // NOTE(review): protoc-generated Builder plumbing; regenerate rather than hand-edit.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.datacatalog.v1.Datacatalog
          .internal_static_google_cloud_datacatalog_v1_ListEntriesResponse_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.datacatalog.v1.Datacatalog
          .internal_static_google_cloud_datacatalog_v1_ListEntriesResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.datacatalog.v1.ListEntriesResponse.class,
              com.google.cloud.datacatalog.v1.ListEntriesResponse.Builder.class);
    }
    // Construct using com.google.cloud.datacatalog.v1.ListEntriesResponse.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      // Reset the entries field whether it is held locally or in a nested field builder.
      if (entriesBuilder_ == null) {
        entries_ = java.util.Collections.emptyList();
      } else {
        entries_ = null;
        entriesBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.datacatalog.v1.Datacatalog
          .internal_static_google_cloud_datacatalog_v1_ListEntriesResponse_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.datacatalog.v1.ListEntriesResponse getDefaultInstanceForType() {
      return com.google.cloud.datacatalog.v1.ListEntriesResponse.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.datacatalog.v1.ListEntriesResponse build() {
      com.google.cloud.datacatalog.v1.ListEntriesResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.datacatalog.v1.ListEntriesResponse buildPartial() {
      com.google.cloud.datacatalog.v1.ListEntriesResponse result =
          new com.google.cloud.datacatalog.v1.ListEntriesResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Moves the repeated `entries` field into the result, freezing the local list when it
    // is still owned by this builder (presence bit 0x1 set).
    private void buildPartialRepeatedFields(
        com.google.cloud.datacatalog.v1.ListEntriesResponse result) {
      if (entriesBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          entries_ = java.util.Collections.unmodifiableList(entries_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.entries_ = entries_;
      } else {
        result.entries_ = entriesBuilder_.build();
      }
    }
    // Copies singular fields whose presence bit is set (0x2 = next_page_token).
    private void buildPartial0(com.google.cloud.datacatalog.v1.ListEntriesResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
    // NOTE(review): protoc-generated overrides that narrow the return type to Builder;
    // all simply delegate to GeneratedMessageV3.Builder.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.datacatalog.v1.ListEntriesResponse) {
        return mergeFrom((com.google.cloud.datacatalog.v1.ListEntriesResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // NOTE(review): protoc-generated typed merge. Repeated `entries` are appended (or the
    // other message's frozen list is adopted wholesale when this builder is empty).
    public Builder mergeFrom(com.google.cloud.datacatalog.v1.ListEntriesResponse other) {
      if (other == com.google.cloud.datacatalog.v1.ListEntriesResponse.getDefaultInstance())
        return this;
      if (entriesBuilder_ == null) {
        if (!other.entries_.isEmpty()) {
          if (entries_.isEmpty()) {
            // Adopt the other message's (immutable) list directly; clear the ownership bit.
            entries_ = other.entries_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureEntriesIsMutable();
            entries_.addAll(other.entries_);
          }
          onChanged();
        }
      } else {
        if (!other.entries_.isEmpty()) {
          if (entriesBuilder_.isEmpty()) {
            // Field builder is empty: discard it and adopt the other list, re-creating the
            // builder lazily only if the runtime always uses field builders.
            entriesBuilder_.dispose();
            entriesBuilder_ = null;
            entries_ = other.entries_;
            bitField0_ = (bitField0_ & ~0x00000001);
            entriesBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getEntriesFieldBuilder()
                    : null;
          } else {
            entriesBuilder_.addAllMessages(other.entries_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // NOTE(review): protoc-generated wire-format parser. Tag 10 = entries (field 1,
    // length-delimited), tag 18 = next_page_token (field 2, length-delimited).
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.cloud.datacatalog.v1.Entry m =
                    input.readMessage(
                        com.google.cloud.datacatalog.v1.Entry.parser(), extensionRegistry);
                if (entriesBuilder_ == null) {
                  ensureEntriesIsMutable();
                  entries_.add(m);
                } else {
                  entriesBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Presence bits: 0x1 = builder owns a mutable entries_ list, 0x2 = nextPageToken set.
    private int bitField0_;
    private java.util.List<com.google.cloud.datacatalog.v1.Entry> entries_ =
        java.util.Collections.emptyList();
    // Copy-on-write: replace the (possibly shared/immutable) list with a private
    // ArrayList before the first local mutation.
    private void ensureEntriesIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        entries_ = new java.util.ArrayList<com.google.cloud.datacatalog.v1.Entry>(entries_);
        bitField0_ |= 0x00000001;
      }
    }
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.datacatalog.v1.Entry,
            com.google.cloud.datacatalog.v1.Entry.Builder,
            com.google.cloud.datacatalog.v1.EntryOrBuilder>
        entriesBuilder_;
/**
*
*
* <pre>
* Entry details.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.v1.Entry entries = 1;</code>
*/
public java.util.List<com.google.cloud.datacatalog.v1.Entry> getEntriesList() {
if (entriesBuilder_ == null) {
return java.util.Collections.unmodifiableList(entries_);
} else {
return entriesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* Entry details.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.v1.Entry entries = 1;</code>
*/
public int getEntriesCount() {
if (entriesBuilder_ == null) {
return entries_.size();
} else {
return entriesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* Entry details.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.v1.Entry entries = 1;</code>
*/
public com.google.cloud.datacatalog.v1.Entry getEntries(int index) {
if (entriesBuilder_ == null) {
return entries_.get(index);
} else {
return entriesBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* Entry details.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.v1.Entry entries = 1;</code>
*/
public Builder setEntries(int index, com.google.cloud.datacatalog.v1.Entry value) {
if (entriesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureEntriesIsMutable();
entries_.set(index, value);
onChanged();
} else {
entriesBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Entry details.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.v1.Entry entries = 1;</code>
*/
public Builder setEntries(
int index, com.google.cloud.datacatalog.v1.Entry.Builder builderForValue) {
if (entriesBuilder_ == null) {
ensureEntriesIsMutable();
entries_.set(index, builderForValue.build());
onChanged();
} else {
entriesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Entry details.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.v1.Entry entries = 1;</code>
*/
public Builder addEntries(com.google.cloud.datacatalog.v1.Entry value) {
if (entriesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureEntriesIsMutable();
entries_.add(value);
onChanged();
} else {
entriesBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* Entry details.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.v1.Entry entries = 1;</code>
*/
public Builder addEntries(int index, com.google.cloud.datacatalog.v1.Entry value) {
if (entriesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureEntriesIsMutable();
entries_.add(index, value);
onChanged();
} else {
entriesBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Entry details.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.v1.Entry entries = 1;</code>
*/
public Builder addEntries(com.google.cloud.datacatalog.v1.Entry.Builder builderForValue) {
if (entriesBuilder_ == null) {
ensureEntriesIsMutable();
entries_.add(builderForValue.build());
onChanged();
} else {
entriesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Entry details.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.v1.Entry entries = 1;</code>
*/
public Builder addEntries(
int index, com.google.cloud.datacatalog.v1.Entry.Builder builderForValue) {
if (entriesBuilder_ == null) {
ensureEntriesIsMutable();
entries_.add(index, builderForValue.build());
onChanged();
} else {
entriesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Entry details.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.v1.Entry entries = 1;</code>
*/
public Builder addAllEntries(
java.lang.Iterable<? extends com.google.cloud.datacatalog.v1.Entry> values) {
if (entriesBuilder_ == null) {
ensureEntriesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, entries_);
onChanged();
} else {
entriesBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* Entry details.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.v1.Entry entries = 1;</code>
*/
public Builder clearEntries() {
if (entriesBuilder_ == null) {
entries_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
entriesBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* Entry details.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.v1.Entry entries = 1;</code>
*/
public Builder removeEntries(int index) {
if (entriesBuilder_ == null) {
ensureEntriesIsMutable();
entries_.remove(index);
onChanged();
} else {
entriesBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* Entry details.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.v1.Entry entries = 1;</code>
*/
public com.google.cloud.datacatalog.v1.Entry.Builder getEntriesBuilder(int index) {
return getEntriesFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* Entry details.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.v1.Entry entries = 1;</code>
*/
public com.google.cloud.datacatalog.v1.EntryOrBuilder getEntriesOrBuilder(int index) {
if (entriesBuilder_ == null) {
return entries_.get(index);
} else {
return entriesBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* Entry details.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.v1.Entry entries = 1;</code>
*/
public java.util.List<? extends com.google.cloud.datacatalog.v1.EntryOrBuilder>
getEntriesOrBuilderList() {
if (entriesBuilder_ != null) {
return entriesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(entries_);
}
}
/**
*
*
* <pre>
* Entry details.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.v1.Entry entries = 1;</code>
*/
public com.google.cloud.datacatalog.v1.Entry.Builder addEntriesBuilder() {
return getEntriesFieldBuilder()
.addBuilder(com.google.cloud.datacatalog.v1.Entry.getDefaultInstance());
}
/**
*
*
* <pre>
* Entry details.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.v1.Entry entries = 1;</code>
*/
public com.google.cloud.datacatalog.v1.Entry.Builder addEntriesBuilder(int index) {
return getEntriesFieldBuilder()
.addBuilder(index, com.google.cloud.datacatalog.v1.Entry.getDefaultInstance());
}
/**
*
*
* <pre>
* Entry details.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.v1.Entry entries = 1;</code>
*/
public java.util.List<com.google.cloud.datacatalog.v1.Entry.Builder> getEntriesBuilderList() {
return getEntriesFieldBuilder().getBuilderList();
}
// Lazily creates the repeated-field builder for `entries` on first use. After
// creation the builder owns the element storage, so entries_ is nulled to
// avoid two live copies of the list.
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.datacatalog.v1.Entry,
com.google.cloud.datacatalog.v1.Entry.Builder,
com.google.cloud.datacatalog.v1.EntryOrBuilder>
getEntriesFieldBuilder() {
if (entriesBuilder_ == null) {
entriesBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.datacatalog.v1.Entry,
com.google.cloud.datacatalog.v1.Entry.Builder,
com.google.cloud.datacatalog.v1.EntryOrBuilder>(
entries_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
entries_ = null;
}
return entriesBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Pagination token of the next results page. Empty if there are no more items
* in results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
// nextPageToken_ may hold either a String or a ByteString (as parsed from
// the wire). On first access of the String form, decode UTF-8 once and
// cache the result back into the field.
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Pagination token of the next results page. Empty if there are no more items
* in results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
// Mirror image of getNextPageToken(): if the field currently caches a
// String, encode it to a ByteString once and cache that representation.
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Pagination token of the next results page. Empty if there are no more items
* in results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Pagination token of the next results page. Empty if there are no more items
* in results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Pagination token of the next results page. Empty if there are no more items
* in results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.datacatalog.v1.ListEntriesResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1.ListEntriesResponse)
// Singleton default (all-fields-default) instance, created eagerly at class
// load time and shared by getDefaultInstance()/getDefaultInstanceForType().
private static final com.google.cloud.datacatalog.v1.ListEntriesResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.datacatalog.v1.ListEntriesResponse();
}
public static com.google.cloud.datacatalog.v1.ListEntriesResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Shared parser: delegates to Builder.mergeFrom and, on any failure, attaches
// the partially-built message to the thrown InvalidProtocolBufferException so
// callers can inspect what was parsed before the error.
private static final com.google.protobuf.Parser<ListEntriesResponse> PARSER =
new com.google.protobuf.AbstractParser<ListEntriesResponse>() {
@java.lang.Override
public ListEntriesResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListEntriesResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListEntriesResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.datacatalog.v1.ListEntriesResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 35,006 | java-analyticshub/proto-google-cloud-analyticshub-v1/src/main/java/com/google/cloud/bigquery/analyticshub/v1/JavaScriptUDF.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/bigquery/analyticshub/v1/pubsub.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.bigquery.analyticshub.v1;
/**
*
*
* <pre>
* User-defined JavaScript function that can transform or filter a Pub/Sub
* message.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.analyticshub.v1.JavaScriptUDF}
*/
public final class JavaScriptUDF extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.bigquery.analyticshub.v1.JavaScriptUDF)
JavaScriptUDFOrBuilder {
private static final long serialVersionUID = 0L;
// Use JavaScriptUDF.newBuilder() to construct.
private JavaScriptUDF(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Default-instance constructor: both string fields start as the empty string
// (the proto3 default).
private JavaScriptUDF() {
functionName_ = "";
code_ = "";
}
// Reflection hook used by the protobuf runtime to create fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new JavaScriptUDF();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.bigquery.analyticshub.v1.PubsubProto
.internal_static_google_cloud_bigquery_analyticshub_v1_JavaScriptUDF_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.bigquery.analyticshub.v1.PubsubProto
.internal_static_google_cloud_bigquery_analyticshub_v1_JavaScriptUDF_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF.class,
com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF.Builder.class);
}
public static final int FUNCTION_NAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object functionName_ = "";
/**
*
*
* <pre>
* Required. Name of the JavasScript function that should applied to Pub/Sub
* messages.
* </pre>
*
* <code>string function_name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The functionName.
*/
@java.lang.Override
public java.lang.String getFunctionName() {
java.lang.Object ref = functionName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
functionName_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. Name of the JavasScript function that should applied to Pub/Sub
* messages.
* </pre>
*
* <code>string function_name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for functionName.
*/
@java.lang.Override
public com.google.protobuf.ByteString getFunctionNameBytes() {
java.lang.Object ref = functionName_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
functionName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int CODE_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object code_ = "";
/**
*
*
* <pre>
* Required. JavaScript code that contains a function `function_name` with the
* below signature:
*
* ```
* /**
* * Transforms a Pub/Sub message.
*
* * @return {(Object<string, (string | Object<string, string>)>|null)} - To
* * filter a message, return `null`. To transform a message return a map
* * with the following keys:
* * - (required) 'data' : {string}
* * - (optional) 'attributes' : {Object<string, string>}
* * Returning empty `attributes` will remove all attributes from the
* * message.
* *
* * @param {(Object<string, (string | Object<string, string>)>} Pub/Sub
* * message. Keys:
* * - (required) 'data' : {string}
* * - (required) 'attributes' : {Object<string, string>}
* *
* * @param {Object<string, any>} metadata - Pub/Sub message metadata.
* * Keys:
* * - (required) 'message_id' : {string}
* * - (optional) 'publish_time': {string} YYYY-MM-DDTHH:MM:SSZ format
* * - (optional) 'ordering_key': {string}
* */
*
* function <function_name>(message, metadata) {
* }
* ```
* </pre>
*
* <code>string code = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The code.
*/
@java.lang.Override
public java.lang.String getCode() {
java.lang.Object ref = code_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
code_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. JavaScript code that contains a function `function_name` with the
* below signature:
*
* ```
* /**
* * Transforms a Pub/Sub message.
*
* * @return {(Object<string, (string | Object<string, string>)>|null)} - To
* * filter a message, return `null`. To transform a message return a map
* * with the following keys:
* * - (required) 'data' : {string}
* * - (optional) 'attributes' : {Object<string, string>}
* * Returning empty `attributes` will remove all attributes from the
* * message.
* *
* * @param {(Object<string, (string | Object<string, string>)>} Pub/Sub
* * message. Keys:
* * - (required) 'data' : {string}
* * - (required) 'attributes' : {Object<string, string>}
* *
* * @param {Object<string, any>} metadata - Pub/Sub message metadata.
* * Keys:
* * - (required) 'message_id' : {string}
* * - (optional) 'publish_time': {string} YYYY-MM-DDTHH:MM:SSZ format
* * - (optional) 'ordering_key': {string}
* */
*
* function <function_name>(message, metadata) {
* }
* ```
* </pre>
*
* <code>string code = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for code.
*/
@java.lang.Override
public com.google.protobuf.ByteString getCodeBytes() {
java.lang.Object ref = code_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
code_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Memoized initialization state: -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// No required fields in this proto3 message, so the answer is always true;
// cache it for subsequent calls.
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
// Proto3 serialization: string fields are skipped when empty (their default),
// then any unknown fields captured at parse time are re-emitted.
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(functionName_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, functionName_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(code_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, code_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
// memoizedSize caches the computed size (-1 means "not yet computed");
// safe because message instances are immutable once built.
int size = memoizedSize;
if (size != -1) return size;
size = 0;
// Mirrors writeTo(): only non-default (non-empty) fields contribute bytes.
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(functionName_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, functionName_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(code_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, code_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
// Non-JavaScriptUDF operands fall back to the (reflective) superclass check.
if (!(obj instanceof com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF)) {
return super.equals(obj);
}
com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF other =
(com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF) obj;
// Field-by-field comparison, including unknown fields, so two messages are
// equal iff they serialize identically.
if (!getFunctionName().equals(other.getFunctionName())) return false;
if (!getCode().equals(other.getCode())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
// Cached after first computation (0 means "not yet computed"); consistent
// with equals() above, which compares the same fields.
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + FUNCTION_NAME_FIELD_NUMBER;
hash = (53 * hash) + getFunctionName().hashCode();
hash = (37 * hash) + CODE_FIELD_NUMBER;
hash = (53 * hash) + getCode().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* User-defined JavaScript function that can transform or filter a Pub/Sub
* message.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.analyticshub.v1.JavaScriptUDF}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.bigquery.analyticshub.v1.JavaScriptUDF)
com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDFOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.bigquery.analyticshub.v1.PubsubProto
.internal_static_google_cloud_bigquery_analyticshub_v1_JavaScriptUDF_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.bigquery.analyticshub.v1.PubsubProto
.internal_static_google_cloud_bigquery_analyticshub_v1_JavaScriptUDF_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF.class,
com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF.Builder.class);
}
// Construct using com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
functionName_ = "";
code_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.bigquery.analyticshub.v1.PubsubProto
.internal_static_google_cloud_bigquery_analyticshub_v1_JavaScriptUDF_descriptor;
}
@java.lang.Override
public com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF getDefaultInstanceForType() {
return com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF build() {
com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF buildPartial() {
com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF result =
new com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF(this);
// Only copy fields over when at least one has been set on this builder.
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Copies each explicitly-set field (per its bitField0_ bit) from the builder
// into the result: bit 0x1 = function_name, bit 0x2 = code.
private void buildPartial0(com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.functionName_ = functionName_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.code_ = code_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
// Fast path for same-type merges; anything else goes through the generic
// reflective merge in the superclass.
if (other instanceof com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF) {
return mergeFrom((com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF other) {
// Merging the default instance is a no-op.
if (other == com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF.getDefaultInstance())
return this;
// Proto3 merge semantics: a field from `other` overwrites ours only when it
// is non-default (non-empty) in `other`.
if (!other.getFunctionName().isEmpty()) {
functionName_ = other.functionName_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getCode().isEmpty()) {
code_ = other.code_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
// No required fields in this proto3 message; always initialized.
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
// Streaming parse loop over wire-format tags until end-of-message (tag 0)
// or an end-group tag.
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
// Tag 10 = field 1 (`function_name`), length-delimited UTF-8 string.
functionName_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
// Tag 18 = field 2 (`code`), length-delimited UTF-8 string.
code_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
// Notify parent builders even on failure; partial state may have changed.
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object functionName_ = "";
/**
*
*
* <pre>
* Required. Name of the JavasScript function that should applied to Pub/Sub
* messages.
* </pre>
*
* <code>string function_name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The functionName.
*/
public java.lang.String getFunctionName() {
java.lang.Object ref = functionName_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
functionName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Name of the JavasScript function that should applied to Pub/Sub
* messages.
* </pre>
*
* <code>string function_name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for functionName.
*/
public com.google.protobuf.ByteString getFunctionNameBytes() {
java.lang.Object ref = functionName_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
functionName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Name of the JavasScript function that should applied to Pub/Sub
* messages.
* </pre>
*
* <code>string function_name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The functionName to set.
* @return This builder for chaining.
*/
public Builder setFunctionName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
functionName_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Name of the JavasScript function that should applied to Pub/Sub
* messages.
* </pre>
*
* <code>string function_name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearFunctionName() {
functionName_ = getDefaultInstance().getFunctionName();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Name of the JavasScript function that should applied to Pub/Sub
* messages.
* </pre>
*
* <code>string function_name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for functionName to set.
* @return This builder for chaining.
*/
public Builder setFunctionNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
functionName_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
    // Lazily-converted storage for the `code` field: holds either a
    // java.lang.String or a com.google.protobuf.ByteString (see getCode()).
    private java.lang.Object code_ = "";
/**
*
*
* <pre>
* Required. JavaScript code that contains a function `function_name` with the
* below signature:
*
* ```
* /**
* * Transforms a Pub/Sub message.
*
* * @return {(Object<string, (string | Object<string, string>)>|null)} - To
* * filter a message, return `null`. To transform a message return a map
* * with the following keys:
* * - (required) 'data' : {string}
* * - (optional) 'attributes' : {Object<string, string>}
* * Returning empty `attributes` will remove all attributes from the
* * message.
* *
* * @param {(Object<string, (string | Object<string, string>)>} Pub/Sub
* * message. Keys:
* * - (required) 'data' : {string}
* * - (required) 'attributes' : {Object<string, string>}
* *
* * @param {Object<string, any>} metadata - Pub/Sub message metadata.
* * Keys:
* * - (required) 'message_id' : {string}
* * - (optional) 'publish_time': {string} YYYY-MM-DDTHH:MM:SSZ format
* * - (optional) 'ordering_key': {string}
* */
*
* function <function_name>(message, metadata) {
* }
* ```
* </pre>
*
* <code>string code = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The code.
*/
public java.lang.String getCode() {
java.lang.Object ref = code_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
code_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. JavaScript code that contains a function `function_name` with the
* below signature:
*
* ```
* /**
* * Transforms a Pub/Sub message.
*
* * @return {(Object<string, (string | Object<string, string>)>|null)} - To
* * filter a message, return `null`. To transform a message return a map
* * with the following keys:
* * - (required) 'data' : {string}
* * - (optional) 'attributes' : {Object<string, string>}
* * Returning empty `attributes` will remove all attributes from the
* * message.
* *
* * @param {(Object<string, (string | Object<string, string>)>} Pub/Sub
* * message. Keys:
* * - (required) 'data' : {string}
* * - (required) 'attributes' : {Object<string, string>}
* *
* * @param {Object<string, any>} metadata - Pub/Sub message metadata.
* * Keys:
* * - (required) 'message_id' : {string}
* * - (optional) 'publish_time': {string} YYYY-MM-DDTHH:MM:SSZ format
* * - (optional) 'ordering_key': {string}
* */
*
* function <function_name>(message, metadata) {
* }
* ```
* </pre>
*
* <code>string code = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for code.
*/
public com.google.protobuf.ByteString getCodeBytes() {
java.lang.Object ref = code_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
code_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. JavaScript code that contains a function `function_name` with the
* below signature:
*
* ```
* /**
* * Transforms a Pub/Sub message.
*
* * @return {(Object<string, (string | Object<string, string>)>|null)} - To
* * filter a message, return `null`. To transform a message return a map
* * with the following keys:
* * - (required) 'data' : {string}
* * - (optional) 'attributes' : {Object<string, string>}
* * Returning empty `attributes` will remove all attributes from the
* * message.
* *
* * @param {(Object<string, (string | Object<string, string>)>} Pub/Sub
* * message. Keys:
* * - (required) 'data' : {string}
* * - (required) 'attributes' : {Object<string, string>}
* *
* * @param {Object<string, any>} metadata - Pub/Sub message metadata.
* * Keys:
* * - (required) 'message_id' : {string}
* * - (optional) 'publish_time': {string} YYYY-MM-DDTHH:MM:SSZ format
* * - (optional) 'ordering_key': {string}
* */
*
* function <function_name>(message, metadata) {
* }
* ```
* </pre>
*
* <code>string code = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The code to set.
* @return This builder for chaining.
*/
public Builder setCode(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
code_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. JavaScript code that contains a function `function_name` with the
* below signature:
*
* ```
* /**
* * Transforms a Pub/Sub message.
*
* * @return {(Object<string, (string | Object<string, string>)>|null)} - To
* * filter a message, return `null`. To transform a message return a map
* * with the following keys:
* * - (required) 'data' : {string}
* * - (optional) 'attributes' : {Object<string, string>}
* * Returning empty `attributes` will remove all attributes from the
* * message.
* *
* * @param {(Object<string, (string | Object<string, string>)>} Pub/Sub
* * message. Keys:
* * - (required) 'data' : {string}
* * - (required) 'attributes' : {Object<string, string>}
* *
* * @param {Object<string, any>} metadata - Pub/Sub message metadata.
* * Keys:
* * - (required) 'message_id' : {string}
* * - (optional) 'publish_time': {string} YYYY-MM-DDTHH:MM:SSZ format
* * - (optional) 'ordering_key': {string}
* */
*
* function <function_name>(message, metadata) {
* }
* ```
* </pre>
*
* <code>string code = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearCode() {
code_ = getDefaultInstance().getCode();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. JavaScript code that contains a function `function_name` with the
* below signature:
*
* ```
* /**
* * Transforms a Pub/Sub message.
*
* * @return {(Object<string, (string | Object<string, string>)>|null)} - To
* * filter a message, return `null`. To transform a message return a map
* * with the following keys:
* * - (required) 'data' : {string}
* * - (optional) 'attributes' : {Object<string, string>}
* * Returning empty `attributes` will remove all attributes from the
* * message.
* *
* * @param {(Object<string, (string | Object<string, string>)>} Pub/Sub
* * message. Keys:
* * - (required) 'data' : {string}
* * - (required) 'attributes' : {Object<string, string>}
* *
* * @param {Object<string, any>} metadata - Pub/Sub message metadata.
* * Keys:
* * - (required) 'message_id' : {string}
* * - (optional) 'publish_time': {string} YYYY-MM-DDTHH:MM:SSZ format
* * - (optional) 'ordering_key': {string}
* */
*
* function <function_name>(message, metadata) {
* }
* ```
* </pre>
*
* <code>string code = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for code to set.
* @return This builder for chaining.
*/
public Builder setCodeBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
code_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Unknown-field bookkeeping is handled entirely by the generated superclass.
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Unknown-field bookkeeping is handled entirely by the generated superclass.
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.bigquery.analyticshub.v1.JavaScriptUDF)
}
// @@protoc_insertion_point(class_scope:google.cloud.bigquery.analyticshub.v1.JavaScriptUDF)
  // Shared immutable default instance; every "empty" JavaScriptUDF aliases it.
  private static final com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF();
  }
  public static com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF getDefaultInstance() {
    // Singleton all-fields-unset instance.
    return DEFAULT_INSTANCE;
  }
  // Stateless parser shared by all callers. parsePartialFrom never returns
  // null: every failure path attaches the partially-built message to the
  // thrown InvalidProtocolBufferException so callers can inspect it.
  private static final com.google.protobuf.Parser<JavaScriptUDF> PARSER =
      new com.google.protobuf.AbstractParser<JavaScriptUDF>() {
        @java.lang.Override
        public JavaScriptUDF parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Re-throw with whatever was parsed before the failure.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures in the protobuf exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<JavaScriptUDF> parser() {
    // Static accessor for the shared parser instance.
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<JavaScriptUDF> getParserForType() {
    // Instance-level accessor required by the Message interface.
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.bigquery.analyticshub.v1.JavaScriptUDF getDefaultInstanceForType() {
    // Instance-level accessor required by the Message interface.
    return DEFAULT_INSTANCE;
  }
}
|
openjdk/jdk8 | 35,214 | jdk/src/share/classes/java/lang/management/ManagementFactory.java | /*
* Copyright (c) 2003, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package java.lang.management;
import javax.management.DynamicMBean;
import javax.management.MBeanServer;
import javax.management.MBeanServerConnection;
import javax.management.MBeanServerFactory;
import javax.management.MBeanServerPermission;
import javax.management.NotificationEmitter;
import javax.management.ObjectInstance;
import javax.management.ObjectName;
import javax.management.InstanceAlreadyExistsException;
import javax.management.InstanceNotFoundException;
import javax.management.MalformedObjectNameException;
import javax.management.MBeanRegistrationException;
import javax.management.NotCompliantMBeanException;
import javax.management.StandardEmitterMBean;
import javax.management.StandardMBean;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.security.AccessController;
import java.security.Permission;
import java.security.PrivilegedAction;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import javax.management.JMX;
import sun.management.ManagementFactoryHelper;
/**
* The {@code ManagementFactory} class is a factory class for getting
* managed beans for the Java platform.
* This class consists of static methods each of which returns
* one or more <i>platform MXBeans</i> representing
* the management interface of a component of the Java virtual
* machine.
*
* <h3><a name="MXBean">Platform MXBeans</a></h3>
* <p>
* A platform MXBean is a <i>managed bean</i> that
* conforms to the <a href="../../../javax/management/package-summary.html">JMX</a>
* Instrumentation Specification and only uses a set of basic data types.
* A JMX management application and the {@linkplain
* #getPlatformMBeanServer platform MBeanServer}
* can interoperate without requiring classes for MXBean specific
* data types.
* The data types being transmitted between the JMX connector
* server and the connector client are
* {@linkplain javax.management.openmbean.OpenType open types}
* and this allows interoperation across versions.
* See <a href="../../../javax/management/MXBean.html#MXBean-spec">
* the specification of MXBeans</a> for details.
*
* <a name="MXBeanNames"></a>
* <p>Each platform MXBean is a {@link PlatformManagedObject}
* and it has a unique
* {@link javax.management.ObjectName ObjectName} for
* registration in the platform {@code MBeanServer} as returned by
* by the {@link PlatformManagedObject#getObjectName getObjectName}
* method.
*
* <p>
* An application can access a platform MXBean in the following ways:
* <h4>1. Direct access to an MXBean interface</h4>
* <blockquote>
* <ul>
* <li>Get an MXBean instance by calling the
* {@link #getPlatformMXBean(Class) getPlatformMXBean} or
* {@link #getPlatformMXBeans(Class) getPlatformMXBeans} method
* and access the MXBean locally in the running
* virtual machine.
* </li>
* <li>Construct an MXBean proxy instance that forwards the
* method calls to a given {@link MBeanServer MBeanServer} by calling
* the {@link #getPlatformMXBean(MBeanServerConnection, Class)} or
* {@link #getPlatformMXBeans(MBeanServerConnection, Class)} method.
* The {@link #newPlatformMXBeanProxy newPlatformMXBeanProxy} method
* can also be used to construct an MXBean proxy instance of
* a given {@code ObjectName}.
* A proxy is typically constructed to remotely access
* an MXBean of another running virtual machine.
* </li>
* </ul>
* <h4>2. Indirect access to an MXBean interface via MBeanServer</h4>
* <ul>
* <li>Go through the platform {@code MBeanServer} to access MXBeans
* locally or a specific <tt>MBeanServerConnection</tt> to access
* MXBeans remotely.
* The attributes and operations of an MXBean use only
* <em>JMX open types</em> which include basic data types,
* {@link javax.management.openmbean.CompositeData CompositeData},
* and {@link javax.management.openmbean.TabularData TabularData}
* defined in
* {@link javax.management.openmbean.OpenType OpenType}.
 * The mapping is specified in
 * the {@linkplain javax.management.MXBean MXBean} specification.
* </li>
* </ul>
* </blockquote>
*
* <p>
* The {@link #getPlatformManagementInterfaces getPlatformManagementInterfaces}
* method returns all management interfaces supported in the Java virtual machine
* including the standard management interfaces listed in the tables
* below as well as the management interfaces extended by the JDK implementation.
* <p>
* A Java virtual machine has a single instance of the following management
* interfaces:
*
* <blockquote>
* <table border summary="The list of Management Interfaces and their single instances">
* <tr>
* <th>Management Interface</th>
* <th>ObjectName</th>
* </tr>
* <tr>
* <td> {@link ClassLoadingMXBean} </td>
* <td> {@link #CLASS_LOADING_MXBEAN_NAME
* java.lang:type=ClassLoading}</td>
* </tr>
* <tr>
* <td> {@link MemoryMXBean} </td>
* <td> {@link #MEMORY_MXBEAN_NAME
* java.lang:type=Memory}</td>
* </tr>
* <tr>
* <td> {@link ThreadMXBean} </td>
* <td> {@link #THREAD_MXBEAN_NAME
* java.lang:type=Threading}</td>
* </tr>
* <tr>
* <td> {@link RuntimeMXBean} </td>
* <td> {@link #RUNTIME_MXBEAN_NAME
* java.lang:type=Runtime}</td>
* </tr>
* <tr>
* <td> {@link OperatingSystemMXBean} </td>
* <td> {@link #OPERATING_SYSTEM_MXBEAN_NAME
* java.lang:type=OperatingSystem}</td>
* </tr>
* <tr>
* <td> {@link PlatformLoggingMXBean} </td>
* <td> {@link java.util.logging.LogManager#LOGGING_MXBEAN_NAME
* java.util.logging:type=Logging}</td>
* </tr>
* </table>
* </blockquote>
*
* <p>
* A Java virtual machine has zero or a single instance of
* the following management interfaces.
*
* <blockquote>
 * <table border summary="The list of Management Interfaces with zero or one instance">
* <tr>
* <th>Management Interface</th>
* <th>ObjectName</th>
* </tr>
* <tr>
* <td> {@link CompilationMXBean} </td>
* <td> {@link #COMPILATION_MXBEAN_NAME
* java.lang:type=Compilation}</td>
* </tr>
* </table>
* </blockquote>
*
* <p>
* A Java virtual machine may have one or more instances of the following
* management interfaces.
* <blockquote>
 * <table border summary="The list of Management Interfaces with one or more instances">
* <tr>
* <th>Management Interface</th>
* <th>ObjectName</th>
* </tr>
* <tr>
* <td> {@link GarbageCollectorMXBean} </td>
* <td> {@link #GARBAGE_COLLECTOR_MXBEAN_DOMAIN_TYPE
* java.lang:type=GarbageCollector}<tt>,name=</tt><i>collector's name</i></td>
* </tr>
* <tr>
* <td> {@link MemoryManagerMXBean} </td>
* <td> {@link #MEMORY_MANAGER_MXBEAN_DOMAIN_TYPE
* java.lang:type=MemoryManager}<tt>,name=</tt><i>manager's name</i></td>
* </tr>
* <tr>
* <td> {@link MemoryPoolMXBean} </td>
* <td> {@link #MEMORY_POOL_MXBEAN_DOMAIN_TYPE
* java.lang:type=MemoryPool}<tt>,name=</tt><i>pool's name</i></td>
* </tr>
* <tr>
* <td> {@link BufferPoolMXBean} </td>
* <td> {@code java.nio:type=BufferPool,name=}<i>pool name</i></td>
* </tr>
* </table>
* </blockquote>
*
* @see <a href="../../../javax/management/package-summary.html">
* JMX Specification</a>
* @see <a href="package-summary.html#examples">
* Ways to Access Management Metrics</a>
* @see javax.management.MXBean
*
* @author Mandy Chung
* @since 1.5
*/
public class ManagementFactory {
// A class with only static fields and methods.
private ManagementFactory() {};
/**
* String representation of the
* <tt>ObjectName</tt> for the {@link ClassLoadingMXBean}.
*/
public final static String CLASS_LOADING_MXBEAN_NAME =
"java.lang:type=ClassLoading";
/**
* String representation of the
* <tt>ObjectName</tt> for the {@link CompilationMXBean}.
*/
public final static String COMPILATION_MXBEAN_NAME =
"java.lang:type=Compilation";
/**
* String representation of the
* <tt>ObjectName</tt> for the {@link MemoryMXBean}.
*/
public final static String MEMORY_MXBEAN_NAME =
"java.lang:type=Memory";
/**
* String representation of the
* <tt>ObjectName</tt> for the {@link OperatingSystemMXBean}.
*/
public final static String OPERATING_SYSTEM_MXBEAN_NAME =
"java.lang:type=OperatingSystem";
/**
* String representation of the
* <tt>ObjectName</tt> for the {@link RuntimeMXBean}.
*/
public final static String RUNTIME_MXBEAN_NAME =
"java.lang:type=Runtime";
/**
* String representation of the
* <tt>ObjectName</tt> for the {@link ThreadMXBean}.
*/
public final static String THREAD_MXBEAN_NAME =
"java.lang:type=Threading";
/**
* The domain name and the type key property in
* the <tt>ObjectName</tt> for a {@link GarbageCollectorMXBean}.
* The unique <tt>ObjectName</tt> for a <tt>GarbageCollectorMXBean</tt>
* can be formed by appending this string with
* "<tt>,name=</tt><i>collector's name</i>".
*/
public final static String GARBAGE_COLLECTOR_MXBEAN_DOMAIN_TYPE =
"java.lang:type=GarbageCollector";
/**
* The domain name and the type key property in
* the <tt>ObjectName</tt> for a {@link MemoryManagerMXBean}.
* The unique <tt>ObjectName</tt> for a <tt>MemoryManagerMXBean</tt>
* can be formed by appending this string with
* "<tt>,name=</tt><i>manager's name</i>".
*/
public final static String MEMORY_MANAGER_MXBEAN_DOMAIN_TYPE=
"java.lang:type=MemoryManager";
/**
* The domain name and the type key property in
* the <tt>ObjectName</tt> for a {@link MemoryPoolMXBean}.
* The unique <tt>ObjectName</tt> for a <tt>MemoryPoolMXBean</tt>
* can be formed by appending this string with
* <tt>,name=</tt><i>pool's name</i>.
*/
public final static String MEMORY_POOL_MXBEAN_DOMAIN_TYPE=
"java.lang:type=MemoryPool";
/**
* Returns the managed bean for the class loading system of
* the Java virtual machine.
*
* @return a {@link ClassLoadingMXBean} object for
* the Java virtual machine.
*/
    public static ClassLoadingMXBean getClassLoadingMXBean() {
        // Delegates to the VM-side helper that owns the singleton instance.
        return ManagementFactoryHelper.getClassLoadingMXBean();
    }
/**
* Returns the managed bean for the memory system of
* the Java virtual machine.
*
* @return a {@link MemoryMXBean} object for the Java virtual machine.
*/
    public static MemoryMXBean getMemoryMXBean() {
        // Delegates to the VM-side helper that owns the singleton instance.
        return ManagementFactoryHelper.getMemoryMXBean();
    }
/**
* Returns the managed bean for the thread system of
* the Java virtual machine.
*
* @return a {@link ThreadMXBean} object for the Java virtual machine.
*/
    public static ThreadMXBean getThreadMXBean() {
        // Delegates to the VM-side helper that owns the singleton instance.
        return ManagementFactoryHelper.getThreadMXBean();
    }
/**
* Returns the managed bean for the runtime system of
* the Java virtual machine.
*
* @return a {@link RuntimeMXBean} object for the Java virtual machine.
*/
    public static RuntimeMXBean getRuntimeMXBean() {
        // Delegates to the VM-side helper that owns the singleton instance.
        return ManagementFactoryHelper.getRuntimeMXBean();
    }
/**
* Returns the managed bean for the compilation system of
* the Java virtual machine. This method returns <tt>null</tt>
* if the Java virtual machine has no compilation system.
*
* @return a {@link CompilationMXBean} object for the Java virtual
* machine or <tt>null</tt> if the Java virtual machine has
* no compilation system.
*/
    public static CompilationMXBean getCompilationMXBean() {
        // Delegates to the VM-side helper; may be null when the VM has no JIT.
        return ManagementFactoryHelper.getCompilationMXBean();
    }
/**
* Returns the managed bean for the operating system on which
* the Java virtual machine is running.
*
* @return an {@link OperatingSystemMXBean} object for
* the Java virtual machine.
*/
    public static OperatingSystemMXBean getOperatingSystemMXBean() {
        // Delegates to the VM-side helper that owns the singleton instance.
        return ManagementFactoryHelper.getOperatingSystemMXBean();
    }
/**
* Returns a list of {@link MemoryPoolMXBean} objects in the
* Java virtual machine.
* The Java virtual machine can have one or more memory pools.
* It may add or remove memory pools during execution.
*
* @return a list of <tt>MemoryPoolMXBean</tt> objects.
*
*/
    public static List<MemoryPoolMXBean> getMemoryPoolMXBeans() {
        // Pool set may change at runtime; the helper returns the current view.
        return ManagementFactoryHelper.getMemoryPoolMXBeans();
    }
/**
* Returns a list of {@link MemoryManagerMXBean} objects
* in the Java virtual machine.
* The Java virtual machine can have one or more memory managers.
* It may add or remove memory managers during execution.
*
* @return a list of <tt>MemoryManagerMXBean</tt> objects.
*
*/
    public static List<MemoryManagerMXBean> getMemoryManagerMXBeans() {
        // Manager set may change at runtime; the helper returns the current view.
        return ManagementFactoryHelper.getMemoryManagerMXBeans();
    }
/**
* Returns a list of {@link GarbageCollectorMXBean} objects
* in the Java virtual machine.
* The Java virtual machine may have one or more
* <tt>GarbageCollectorMXBean</tt> objects.
* It may add or remove <tt>GarbageCollectorMXBean</tt>
* during execution.
*
* @return a list of <tt>GarbageCollectorMXBean</tt> objects.
*
*/
    public static List<GarbageCollectorMXBean> getGarbageCollectorMXBeans() {
        // Collector set may change at runtime; the helper returns the current view.
        return ManagementFactoryHelper.getGarbageCollectorMXBeans();
    }
    // Lazily created in getPlatformMBeanServer(); access is guarded by the
    // class lock (the accessor is static synchronized).
    private static MBeanServer platformMBeanServer;
/**
* Returns the platform {@link javax.management.MBeanServer MBeanServer}.
* On the first call to this method, it first creates the platform
* {@code MBeanServer} by calling the
* {@link javax.management.MBeanServerFactory#createMBeanServer
* MBeanServerFactory.createMBeanServer}
* method and registers each platform MXBean in this platform
* {@code MBeanServer} with its
* {@link PlatformManagedObject#getObjectName ObjectName}.
* This method, in subsequent calls, will simply return the
* initially created platform {@code MBeanServer}.
* <p>
* MXBeans that get created and destroyed dynamically, for example,
* memory {@link MemoryPoolMXBean pools} and
* {@link MemoryManagerMXBean managers},
* will automatically be registered and deregistered into the platform
* {@code MBeanServer}.
* <p>
* If the system property {@code javax.management.builder.initial}
* is set, the platform {@code MBeanServer} creation will be done
* by the specified {@link javax.management.MBeanServerBuilder}.
* <p>
* It is recommended that this platform MBeanServer also be used
* to register other application managed beans
* besides the platform MXBeans.
* This will allow all MBeans to be published through the same
* {@code MBeanServer} and hence allow for easier network publishing
* and discovery.
* Name conflicts with the platform MXBeans should be avoided.
*
* @return the platform {@code MBeanServer}; the platform
* MXBeans are registered into the platform {@code MBeanServer}
* at the first time this method is called.
*
* @exception SecurityException if there is a security manager
* and the caller does not have the permission required by
* {@link javax.management.MBeanServerFactory#createMBeanServer}.
*
* @see javax.management.MBeanServerFactory
* @see javax.management.MBeanServerFactory#createMBeanServer
*/
    public static synchronized MBeanServer getPlatformMBeanServer() {
        // Creating an MBeanServer is a privileged operation; check before
        // doing any work.
        SecurityManager sm = System.getSecurityManager();
        if (sm != null) {
            Permission perm = new MBeanServerPermission("createMBeanServer");
            sm.checkPermission(perm);
        }
        // Lazily create the server and register every platform MXBean on
        // first use; "synchronized" makes the lazy init race-free.
        if (platformMBeanServer == null) {
            platformMBeanServer = MBeanServerFactory.createMBeanServer();
            for (PlatformComponent pc : PlatformComponent.values()) {
                List<? extends PlatformManagedObject> list =
                    pc.getMXBeans(pc.getMXBeanInterface());
                for (PlatformManagedObject o : list) {
                    // Each PlatformComponent represents one management
                    // interface. Some MXBean may extend another one.
                    // The MXBean instances for one platform component
                    // (returned by pc.getMXBeans()) might be also
                    // the MXBean instances for another platform component.
                    // e.g. com.sun.management.GarbageCollectorMXBean
                    //
                    // So need to check if an MXBean instance is registered
                    // before registering into the platform MBeanServer
                    if (!platformMBeanServer.isRegistered(o.getObjectName())) {
                        addMXBean(platformMBeanServer, o);
                    }
                }
            }
            // Also register implementation-specific DynamicMBeans supplied
            // by the JDK internals.
            HashMap<ObjectName, DynamicMBean> dynmbeans =
                ManagementFactoryHelper.getPlatformDynamicMBeans();
            for (Map.Entry<ObjectName, DynamicMBean> e : dynmbeans.entrySet()) {
                addDynamicMBean(platformMBeanServer, e.getValue(), e.getKey());
            }
        }
        return platformMBeanServer;
    }
/**
* Returns a proxy for a platform MXBean interface of a
* given <a href="#MXBeanNames">MXBean name</a>
* that forwards its method calls through the given
* <tt>MBeanServerConnection</tt>.
*
* <p>This method is equivalent to:
* <blockquote>
* {@link java.lang.reflect.Proxy#newProxyInstance
* Proxy.newProxyInstance}<tt>(mxbeanInterface.getClassLoader(),
* new Class[] { mxbeanInterface }, handler)</tt>
* </blockquote>
*
* where <tt>handler</tt> is an {@link java.lang.reflect.InvocationHandler
* InvocationHandler} to which method invocations to the MXBean interface
* are dispatched. This <tt>handler</tt> converts an input parameter
* from an MXBean data type to its mapped open type before forwarding
* to the <tt>MBeanServer</tt> and converts a return value from
* an MXBean method call through the <tt>MBeanServer</tt>
* from an open type to the corresponding return type declared in
* the MXBean interface.
*
* <p>
* If the MXBean is a notification emitter (i.e.,
* it implements
* {@link javax.management.NotificationEmitter NotificationEmitter}),
* both the <tt>mxbeanInterface</tt> and <tt>NotificationEmitter</tt>
* will be implemented by this proxy.
*
* <p>
* <b>Notes:</b>
* <ol>
* <li>Using an MXBean proxy is a convenience remote access to
* a platform MXBean of a running virtual machine. All method
* calls to the MXBean proxy are forwarded to an
* <tt>MBeanServerConnection</tt> where
* {@link java.io.IOException IOException} may be thrown
* when the communication problem occurs with the connector server.
     * An application that remotely accesses platform MXBeans through a
     * proxy should be prepared to catch <tt>IOException</tt> as if it were
     * accessing them via the <tt>MBeanServerConnector</tt> interface.</li>
*
* <li>When a client application is designed to remotely access MXBeans
* for a running virtual machine whose version is different than
* the version on which the application is running,
* it should prepare to catch
* {@link java.io.InvalidObjectException InvalidObjectException}
* which is thrown when an MXBean proxy receives a name of an
* enum constant which is missing in the enum class loaded in
* the client application. </li>
*
* <li>{@link javax.management.MBeanServerInvocationHandler
* MBeanServerInvocationHandler} or its
* {@link javax.management.MBeanServerInvocationHandler#newProxyInstance
* newProxyInstance} method cannot be used to create
* a proxy for a platform MXBean. The proxy object created
* by <tt>MBeanServerInvocationHandler</tt> does not handle
* the properties of the platform MXBeans described in
* the <a href="#MXBean">class specification</a>.
*</li>
* </ol>
*
* @param connection the <tt>MBeanServerConnection</tt> to forward to.
* @param mxbeanName the name of a platform MXBean within
* <tt>connection</tt> to forward to. <tt>mxbeanName</tt> must be
* in the format of {@link ObjectName ObjectName}.
* @param mxbeanInterface the MXBean interface to be implemented
* by the proxy.
* @param <T> an {@code mxbeanInterface} type parameter
*
* @return a proxy for a platform MXBean interface of a
* given <a href="#MXBeanNames">MXBean name</a>
* that forwards its method calls through the given
* <tt>MBeanServerConnection</tt>, or {@code null} if not exist.
*
* @throws IllegalArgumentException if
* <ul>
* <li><tt>mxbeanName</tt> is not with a valid
* {@link ObjectName ObjectName} format, or</li>
* <li>the named MXBean in the <tt>connection</tt> is
* not a MXBean provided by the platform, or</li>
* <li>the named MXBean is not registered in the
* <tt>MBeanServerConnection</tt>, or</li>
* <li>the named MXBean is not an instance of the given
* <tt>mxbeanInterface</tt></li>
* </ul>
*
* @throws java.io.IOException if a communication problem
* occurred when accessing the <tt>MBeanServerConnection</tt>.
*/
public static <T> T
newPlatformMXBeanProxy(MBeanServerConnection connection,
String mxbeanName,
Class<T> mxbeanInterface)
throws java.io.IOException {
// Only allow MXBean interfaces from rt.jar loaded by the
// bootstrap class loader
final Class<?> cls = mxbeanInterface;
ClassLoader loader =
AccessController.doPrivileged(new PrivilegedAction<ClassLoader>() {
public ClassLoader run() {
return cls.getClassLoader();
}
});
if (!sun.misc.VM.isSystemDomainLoader(loader)) {
throw new IllegalArgumentException(mxbeanName +
" is not a platform MXBean");
}
try {
final ObjectName objName = new ObjectName(mxbeanName);
// skip the isInstanceOf check for LoggingMXBean
String intfName = mxbeanInterface.getName();
if (!connection.isInstanceOf(objName, intfName)) {
throw new IllegalArgumentException(mxbeanName +
" is not an instance of " + mxbeanInterface);
}
final Class[] interfaces;
// check if the registered MBean is a notification emitter
boolean emitter = connection.isInstanceOf(objName, NOTIF_EMITTER);
// create an MXBean proxy
return JMX.newMXBeanProxy(connection, objName, mxbeanInterface,
emitter);
} catch (InstanceNotFoundException|MalformedObjectNameException e) {
throw new IllegalArgumentException(e);
}
}
/**
* Returns the platform MXBean implementing
* the given {@code mxbeanInterface} which is specified
* to have one single instance in the Java virtual machine.
* This method may return {@code null} if the management interface
* is not implemented in the Java virtual machine (for example,
* a Java virtual machine with no compilation system does not
* implement {@link CompilationMXBean});
* otherwise, this method is equivalent to calling:
* <pre>
* {@link #getPlatformMXBeans(Class)
* getPlatformMXBeans(mxbeanInterface)}.get(0);
* </pre>
*
* @param mxbeanInterface a management interface for a platform
* MXBean with one single instance in the Java virtual machine
* if implemented.
* @param <T> an {@code mxbeanInterface} type parameter
*
* @return the platform MXBean that implements
* {@code mxbeanInterface}, or {@code null} if not exist.
*
* @throws IllegalArgumentException if {@code mxbeanInterface}
* is not a platform management interface or
* not a singleton platform MXBean.
*
* @since 1.7
*/
public static <T extends PlatformManagedObject>
T getPlatformMXBean(Class<T> mxbeanInterface) {
PlatformComponent pc = PlatformComponent.getPlatformComponent(mxbeanInterface);
if (pc == null)
throw new IllegalArgumentException(mxbeanInterface.getName() +
" is not a platform management interface");
if (!pc.isSingleton())
throw new IllegalArgumentException(mxbeanInterface.getName() +
" can have zero or more than one instances");
return pc.getSingletonMXBean(mxbeanInterface);
}
/**
* Returns the list of platform MXBeans implementing
* the given {@code mxbeanInterface} in the Java
* virtual machine.
* The returned list may contain zero, one, or more instances.
* The number of instances in the returned list is defined
* in the specification of the given management interface.
* The order is undefined and there is no guarantee that
* the list returned is in the same order as previous invocations.
*
* @param mxbeanInterface a management interface for a platform
* MXBean
* @param <T> an {@code mxbeanInterface} type parameter
*
* @return the list of platform MXBeans that implement
* {@code mxbeanInterface}.
*
* @throws IllegalArgumentException if {@code mxbeanInterface}
* is not a platform management interface.
*
* @since 1.7
*/
public static <T extends PlatformManagedObject> List<T>
getPlatformMXBeans(Class<T> mxbeanInterface) {
PlatformComponent pc = PlatformComponent.getPlatformComponent(mxbeanInterface);
if (pc == null)
throw new IllegalArgumentException(mxbeanInterface.getName() +
" is not a platform management interface");
return Collections.unmodifiableList(pc.getMXBeans(mxbeanInterface));
}
/**
* Returns the platform MXBean proxy for
* {@code mxbeanInterface} which is specified to have one single
* instance in a Java virtual machine and the proxy will
* forward the method calls through the given {@code MBeanServerConnection}.
* This method may return {@code null} if the management interface
* is not implemented in the Java virtual machine being monitored
* (for example, a Java virtual machine with no compilation system
* does not implement {@link CompilationMXBean});
* otherwise, this method is equivalent to calling:
* <pre>
* {@link #getPlatformMXBeans(MBeanServerConnection, Class)
* getPlatformMXBeans(connection, mxbeanInterface)}.get(0);
* </pre>
*
* @param connection the {@code MBeanServerConnection} to forward to.
* @param mxbeanInterface a management interface for a platform
* MXBean with one single instance in the Java virtual machine
* being monitored, if implemented.
* @param <T> an {@code mxbeanInterface} type parameter
*
* @return the platform MXBean proxy for
* forwarding the method calls of the {@code mxbeanInterface}
* through the given {@code MBeanServerConnection},
* or {@code null} if not exist.
*
* @throws IllegalArgumentException if {@code mxbeanInterface}
* is not a platform management interface or
* not a singleton platform MXBean.
* @throws java.io.IOException if a communication problem
* occurred when accessing the {@code MBeanServerConnection}.
*
* @see #newPlatformMXBeanProxy
* @since 1.7
*/
public static <T extends PlatformManagedObject>
T getPlatformMXBean(MBeanServerConnection connection,
Class<T> mxbeanInterface)
throws java.io.IOException
{
PlatformComponent pc = PlatformComponent.getPlatformComponent(mxbeanInterface);
if (pc == null)
throw new IllegalArgumentException(mxbeanInterface.getName() +
" is not a platform management interface");
if (!pc.isSingleton())
throw new IllegalArgumentException(mxbeanInterface.getName() +
" can have zero or more than one instances");
return pc.getSingletonMXBean(connection, mxbeanInterface);
}
/**
* Returns the list of the platform MXBean proxies for
* forwarding the method calls of the {@code mxbeanInterface}
* through the given {@code MBeanServerConnection}.
* The returned list may contain zero, one, or more instances.
* The number of instances in the returned list is defined
* in the specification of the given management interface.
* The order is undefined and there is no guarantee that
* the list returned is in the same order as previous invocations.
*
* @param connection the {@code MBeanServerConnection} to forward to.
* @param mxbeanInterface a management interface for a platform
* MXBean
* @param <T> an {@code mxbeanInterface} type parameter
*
* @return the list of platform MXBean proxies for
* forwarding the method calls of the {@code mxbeanInterface}
* through the given {@code MBeanServerConnection}.
*
* @throws IllegalArgumentException if {@code mxbeanInterface}
* is not a platform management interface.
*
* @throws java.io.IOException if a communication problem
* occurred when accessing the {@code MBeanServerConnection}.
*
* @see #newPlatformMXBeanProxy
* @since 1.7
*/
public static <T extends PlatformManagedObject>
List<T> getPlatformMXBeans(MBeanServerConnection connection,
Class<T> mxbeanInterface)
throws java.io.IOException
{
PlatformComponent pc = PlatformComponent.getPlatformComponent(mxbeanInterface);
if (pc == null) {
throw new IllegalArgumentException(mxbeanInterface.getName() +
" is not a platform management interface");
}
return Collections.unmodifiableList(pc.getMXBeans(connection, mxbeanInterface));
}
/**
* Returns the set of {@code Class} objects, subinterface of
* {@link PlatformManagedObject}, representing
* all management interfaces for
* monitoring and managing the Java platform.
*
* @return the set of {@code Class} objects, subinterface of
* {@link PlatformManagedObject} representing
* the management interfaces for
* monitoring and managing the Java platform.
*
* @since 1.7
*/
public static Set<Class<? extends PlatformManagedObject>>
getPlatformManagementInterfaces()
{
Set<Class<? extends PlatformManagedObject>> result =
new HashSet<>();
for (PlatformComponent component: PlatformComponent.values()) {
result.add(component.getMXBeanInterface());
}
return Collections.unmodifiableSet(result);
}
    // Interface name used to probe whether a registered MBean emits
    // notifications (see the isInstanceOf check in newPlatformMXBeanProxy).
    private static final String NOTIF_EMITTER =
        "javax.management.NotificationEmitter";
    /**
     * Registers an MXBean with the given MBeanServer under the MXBean's own
     * object name.
     * <p>
     * The MXBean is adapted to a {@code DynamicMBean} first: used as-is when
     * it already is one, wrapped in a {@link StandardEmitterMBean} when it is
     * a {@link NotificationEmitter}, otherwise wrapped in a plain
     * {@link StandardMBean}. The registration itself runs inside
     * {@code AccessController.doPrivileged}. Any checked registration failure
     * is rethrown as an unchecked {@code RuntimeException}.
     */
    private static void addMXBean(final MBeanServer mbs, final PlatformManagedObject pmo) {
        // Make DynamicMBean out of MXBean by wrapping it with a StandardMBean
        try {
            AccessController.doPrivileged(new PrivilegedExceptionAction<Void>() {
                public Void run() throws InstanceAlreadyExistsException,
                                         MBeanRegistrationException,
                                         NotCompliantMBeanException {
                    final DynamicMBean dmbean;
                    if (pmo instanceof DynamicMBean) {
                        // already dynamic - register it directly
                        dmbean = DynamicMBean.class.cast(pmo);
                    } else if (pmo instanceof NotificationEmitter) {
                        // wrap so the MBean forwards the emitter's notifications
                        dmbean = new StandardEmitterMBean(pmo, null, true, (NotificationEmitter) pmo);
                    } else {
                        dmbean = new StandardMBean(pmo, null, true);
                    }
                    mbs.registerMBean(dmbean, pmo.getObjectName());
                    return null;
                }
            });
        } catch (PrivilegedActionException e) {
            // unwrap the checked cause and rethrow unchecked
            throw new RuntimeException(e.getException());
        }
    }
    /**
     * Registers an already-constructed DynamicMBean with the given MBeanServer
     * under the supplied object name.
     * <p>
     * The registration runs inside {@code AccessController.doPrivileged};
     * any checked registration failure is rethrown as an unchecked
     * {@code RuntimeException}.
     */
    private static void addDynamicMBean(final MBeanServer mbs,
                                        final DynamicMBean dmbean,
                                        final ObjectName on) {
        try {
            AccessController.doPrivileged(new PrivilegedExceptionAction<Void>() {
                @Override
                public Void run() throws InstanceAlreadyExistsException,
                                         MBeanRegistrationException,
                                         NotCompliantMBeanException {
                    mbs.registerMBean(dmbean, on);
                    return null;
                }
            });
        } catch (PrivilegedActionException e) {
            // unwrap the checked cause and rethrow unchecked
            throw new RuntimeException(e.getException());
        }
    }
}
|
oracle/coherence | 34,845 | prj/coherence-core-components/src/main/java/com/tangosol/coherence/component/net/Lease.java |
/*
* Copyright (c) 2000, 2024, Oracle and/or its affiliates.
*
* Licensed under the Universal Permissive License v 1.0 as shown at
* https://oss.oracle.com/licenses/upl.
*/
// ---- class: com.tangosol.coherence.component.net.Lease
package com.tangosol.coherence.component.net;
import com.tangosol.coherence.component.net.MemberSet;
import com.tangosol.coherence.component.util.daemon.queueProcessor.service.grid.ReplicatedCache;
import com.tangosol.net.NamedCache;
import com.tangosol.util.Base;
import com.tangosol.util.ExternalizableHelper;
import com.tangosol.util.WrapperException;
import java.util.Date;
import java.util.concurrent.atomic.AtomicLong;
/**
 * The Lease represents an expirable exclusive possession of a resource shared
 * across a cluster. None of the property accessors are synchronized and
 * usually require external synchronization. The synchronized methods are
 * <code>lock(), validate(), copyFrom(Lease)</code>.
 */
@SuppressWarnings({"deprecation", "rawtypes", "unused", "unchecked", "ConstantConditions", "DuplicatedCode", "ForLoopReplaceableByForEach", "IfCanBeSwitch", "RedundantArrayCreation", "RedundantSuppression", "SameParameterValue", "TryFinallyCanBeTryWithResources", "TryWithIdenticalCatches", "UnnecessaryBoxing", "UnnecessaryUnboxing", "UnusedAssignment"})
public class Lease
extends com.tangosol.coherence.component.Net
implements Cloneable
{
// ---- Fields declarations ----
/**
* Property BY_MEMBER
*
* Indicates a Member based lease ownership.
*/
public static final int BY_MEMBER = 1;
/**
* Property BY_THREAD
*
* Indicates a Thread (per Member) based lease ownership.
*/
public static final int BY_THREAD = 0;
/**
* Property CacheIndex
*
* The index of the cache that this Lease belongs to. This index is used by
* the cache service to get to an appropriate cache [handler].
*/
private int __m_CacheIndex;
/**
* Property ClassLoader
*
     * The ClassLoader that the corresponding resource is known to be
     * associated with.
*/
private transient ClassLoader __m_ClassLoader;
/**
* Property EffectiveTime
*
* Cluster time that the Lease was locked at.
*/
private long __m_EffectiveTime;
/**
* Property ExpirationTime
*
* Cluster time that the Lease expires at.
*/
private long __m_ExpirationTime;
/**
* Property HolderId
*
* Member id of the holder for this Lease. Value of 0 indicates that
* currently there is no holder for this Lease.
*/
private transient int __m_HolderId;
/**
* Property HolderThreadId
*
* Unique id of the Thread which holds a lock for this Lease. This property
* has meaning only if the following holds true:
* <pre><code>
* getHolderId() == getService().getThisMember().getId()
* </code></pre>
*/
private long __m_HolderThreadId;
/**
* Property IssuerId
*
* Member id of the issuer (registrar) for this Lease. Value of 0 indicates
* that currently there is no issuer for this Lease.
*/
private transient int __m_IssuerId;
/**
* Property LEASE_AVAILABLE
*
* Indicates that a resource is known to be available.
*/
public static final int LEASE_AVAILABLE = 2;
/**
* Property LEASE_DIRTY
*
* Indicates that another Member of the Cluster currently holds a Lease for
* a resource.
*/
public static final int LEASE_DIRTY = 4;
/**
* Property LEASE_LOCKED
*
* Indicates that this Member of the Cluster currently holds a Lease for a
* resource.
*/
public static final int LEASE_LOCKED = 3;
/**
* Property LEASE_UNISSUED
*
* Indicates that the Lease issuer is gone.
*/
public static final int LEASE_UNISSUED = 1;
/**
* Property LEASE_UNKNOWN
*
* Indicates that there is no known Lease for a resource.
*/
public static final int LEASE_UNKNOWN = 0;
/**
* Property LeaseVersion
*
* The version of the Lease. It is intended to be used to resolve
* simultaneous conflicting requests.
*
* The value of LeaseVersion is in a range of 0..255, where value of zero
* represents a not existing lease and value of one represents a newly
* inserted lease.
*/
private int __m_LeaseVersion;
/**
* Property ResourceKey
*
* Key for the resource represented by this Lease. This property is set
* during initialization only.
*
* @see #instantiate()
*/
private Object __m_ResourceKey;
/**
* Property ResourceSize
*
* The size of the (serialized) resource represented by this Lease in
     * bytes. It is intended to be used by the cache implementations that have
* automatic purge strategies that are based on the resource "weight".
* This property is calculated asynchronously and is not guaranteed to
* carry the precise value at all times. The value of -1 indicates that the
* resource has not yet been deserialized.
*/
private int __m_ResourceSize;
/**
* Property ResourceVersion
*
* The version of the resource represented by this Lease. It is intended to
* be used in the optimistic scenarios that do not "lock" prior to the
* resource updates, but instead discard the "outdated" update requests.
*
* The value of ResourceVersion is in a range of 0..255, where value of
* zero represents a not existing resource and value of one represents a
* newly inserted resource.
*/
private int __m_ResourceVersion;
/**
* Property Service
*
* Service object handling this Lease. This property is set during
* initialization only.
*
* @see #instantiateLease()
*/
private transient com.tangosol.coherence.component.util.daemon.queueProcessor.service.Grid __m_Service;
/**
* Property ThreadIdCounter
*
* Atomic counter used to assign unique thread identifiers.
*/
private static transient java.util.concurrent.atomic.AtomicLong __s_ThreadIdCounter;
/**
* Property ThreadIdHolder
*
* ThreadLocal object holding unique thread identifiers.
*/
private static transient ThreadLocal __s_ThreadIdHolder;
    // Auto-generated hook that delegates to the default static initializer.
    private static void _initStatic$Default()
        {
        __initStatic();
        }
    // Static initializer (from _initStatic)
    // Runs the default static initialization first, then installs the
    // shared thread-id counter used by getCurrentThreadId().
    static
        {
        // import java.util.concurrent.atomic.AtomicLong;
        _initStatic$Default();
        setThreadIdCounter(new AtomicLong());
        }
    // Default static initializer
    // Creates the ThreadLocal that caches per-thread unique ids; any failure
    // is rethrown as an unchecked WrapperException.
    private static void __initStatic()
        {
        // state initialization: static properties
        try
            {
            __s_ThreadIdHolder = new java.lang.ThreadLocal();
            }
        catch (java.lang.Exception e)
            {
            // re-throw as a runtime exception
            throw new com.tangosol.util.WrapperException(e);
            }
        }
    // Default constructor -- delegates to the initializing constructor with
    // no name, no parent, and immediate initialization.
    public Lease()
        {
        this(null, null, true);
        }
    // Initializing constructor
    // When fInit is false the caller (typically a generated subclass) is
    // responsible for invoking __init() itself.
    public Lease(String sName, com.tangosol.coherence.Component compParent, boolean fInit)
        {
        super(sName, compParent, false);
        if (fInit)
            {
            __init();
            }
        }
    // Main initializer
    // Sets the "not yet known" sentinel values: CacheIndex -1 (no cache) and
    // ResourceSize -1 (resource not yet deserialized).
    public void __init()
        {
        // private initialization
        __initPrivate();
        // state initialization: public and protected properties
        try
            {
            setCacheIndex(-1);
            setResourceSize(-1);
            }
        catch (java.lang.Exception e)
            {
            // re-throw as a runtime exception
            throw new com.tangosol.util.WrapperException(e);
            }
        // signal the end of the initialization
        set_Constructed(true);
        }
    // Private initializer -- no private state of its own; chains to super.
    protected void __initPrivate()
        {
        super.__initPrivate();
        }
    //++ getter for static property _Instance
    /**
     * Getter for property _Instance.<p>
     * Auto generated -- returns a fresh Lease instance.
     */
    public static com.tangosol.coherence.Component get_Instance()
        {
        return new com.tangosol.coherence.component.net.Lease();
        }
    //++ getter for static property _CLASS
    /**
     * Getter for property _CLASS.<p>
     * Property with auto-generated accessor that returns the Class object for a
     * given component.
     */
    public static Class get_CLASS()
        {
        Class clz;
        try
            {
            // the '/'-separated form is the TDE component path; converted to
            // the binary class name before loading
            clz = Class.forName("com.tangosol.coherence/component/net/Lease".replace('/', '.'));
            }
        catch (ClassNotFoundException e)
            {
            throw new NoClassDefFoundError(e.getMessage());
            }
        return clz;
        }
    //++ getter for autogen property _Module
    /**
     * This is an auto-generated method that returns the global [design time]
     * parent component.
     *
     * Note: the class generator will ignore any custom implementation for this
     * behavior.
     */
    private com.tangosol.coherence.Component get_Module()
        {
        return this;
        }
    // Declared at the super level
    /**
     * Returns a shallow copy of this Lease (synchronized so the copied state
     * is consistent). CloneNotSupportedException is rethrown unchecked as a
     * WrapperException since this class implements Cloneable.
     */
    public synchronized Object clone()
        {
        // import com.tangosol.util.WrapperException;
        try
            {
            return super.clone();
            }
        catch (CloneNotSupportedException e)
            {
            throw new WrapperException(e);
            }
        }
    // Declared at the super level
    /**
     * Compares this Lease object with the specified Lease object with an intent
     * to figure out which lease is more "up-to-date". Returns a negative
     * integer, zero, or a positive integer if this Lease is outdated, equally
     * dated, or newer than the specified Lease.
     *
     * @param o  the Lease object to be compared.
     * @return  a negative integer, zero, or a positive integer as this Lease
     * information is older than, equal to, or newer than the specified lease.
     *
     * @throws ClassCastException if the specified object's type prevents it
     * from being compared to this Lease.
     */
    public int compareTo(Object o)
        {
        if (o == this)
            {
            return 0;
            }
        Lease leaseThis = this;
        Lease leaseThat = (Lease) o;
        // versions are circular (2 .. 128 .. 255)
        // 0 represents a not existing lease or resource
        // 1 represents a newly inserted lease or resource
        int nVersionThis = leaseThis.getLeaseVersion();
        int nVersionThat = leaseThat.getLeaseVersion();
        if (nVersionThis == nVersionThat) // commented out in build 59: || nVersionThis == 1 || nVersionThat == 1)
            {
            // equal lease versions: fall back to the resource versions, and
            // if those tie as well, to the lock (effective) timestamps
            nVersionThis = leaseThis.getResourceVersion();
            nVersionThat = leaseThat.getResourceVersion();
            if (nVersionThis == nVersionThat)
                {
                long lTimeThis = leaseThis.getEffectiveTime();
                long lTimeThat = leaseThat.getEffectiveTime();
                return lTimeThis == lTimeThat ? 0 :
                       lTimeThis >  lTimeThat ? +1 : -1;
                }
            }
        // circular comparison: a numerically larger version is "newer" only
        // when the gap is less than half the wrap range (128) or the smaller
        // value is the reserved 0 ("not existing")
        if (nVersionThis > nVersionThat)
            {
            return (nVersionThat == 0 || nVersionThis - nVersionThat < 128) ? +1 : -1;
            }
        else
            {
            return (nVersionThis == 0 || nVersionThat - nVersionThis < 128) ? -1 : +1;
            }
        }
    /**
     * Copy the lease data from the specified Lease.
     * Asserts that both leases describe the same resource (same key and cache
     * index), copies the holder/issuer/time state, and wakes up any threads
     * waiting on this Lease monitor (notifyAll runs even when lease == this).
     *
     * @param lease  the Lease object to copy the data from
     */
    public synchronized void copyFrom(Lease lease)
        {
        // import com.tangosol.util.Base;
        if (lease != this)
            {
            _assert(Base.equals(this.getResourceKey(), lease.getResourceKey()));
            _assert(getCacheIndex() == lease.getCacheIndex());
            setIssuerId      (lease.getIssuerId());
            setHolderId      (lease.getHolderId());
            setHolderThreadId(lease.getHolderThreadId());
            setEffectiveTime (lease.getEffectiveTime());
            setExpirationTime(lease.getExpirationTime());
            // the ResourceSize may not be there (-1 means "unknown")
            int cbSize = lease.getResourceSize();
            if (cbSize >= 0)
                {
                setResourceSize(cbSize);
                }
            copyVersion(lease);
            }
        notifyAll();
        }
/**
* Copy the lease version from the specified Lease.
*
* @param lease the Lease object to copy the version info from
*/
public void copyVersion(Lease lease)
{
// the LeaseVersion may not be there
int nVersion = lease.getLeaseVersion();
if (nVersion > 0)
{
setLeaseVersion(nVersion);
}
// the ResourceVersion may not be there
nVersion = lease.getResourceVersion();
if (nVersion > 0)
{
setResourceVersion(nVersion);
}
}
/**
* Helper method used for reporting.
*/
public static String formatCacheName(int iCache, com.tangosol.coherence.component.util.daemon.queueProcessor.service.Grid service)
{
// import Component.Util.Daemon.QueueProcessor.Service.Grid.ReplicatedCache;
// import com.tangosol.net.NamedCache;
NamedCache cache = null;
if (service instanceof ReplicatedCache)
{
cache = ((ReplicatedCache) service).getCacheHandler(iCache);
}
else if (service instanceof NamedCache)
{
cache = (NamedCache) service;
}
return cache == null ?
service.getServiceName() + "[" + iCache + "]" : cache.getCacheName();
}
public static String formatStatusName(int nStatus)
{
switch (nStatus)
{
case LEASE_UNKNOWN:
return "LEASE_UNKNOWN";
case LEASE_UNISSUED:
return "LEASE_UNISSUED";
case LEASE_AVAILABLE:
return "LEASE_AVAILABLE";
case LEASE_LOCKED:
return "LEASE_LOCKED";
case LEASE_DIRTY:
return "LEASE_DIRTY";
default:
return "<invalid>";
}
}
    // Accessor for the property "CacheIndex"
    /**
     * Getter for property CacheIndex.<p>
     * The index of the cache that this Lease belongs to. This index is used by
     * the cache service to get to an appropriate cache [handler].
     * A value of -1 means "not yet assigned" (see __init()).
     */
    public int getCacheIndex()
        {
        return __m_CacheIndex;
        }
    // Accessor for the property "ClassLoader"
    /**
     * Getter for property ClassLoader.<p>
     * The ClassLoader that the corresponding resource is known to be associated
     * with.
     */
    public ClassLoader getClassLoader()
        {
        return __m_ClassLoader;
        }
    // Accessor for the property "CurrentThreadId"
    /**
     * Getter for property CurrentThreadId.<p>
     * (Calculated) Helper property that (unlike the System.identityHashcode)
     * provides a unique id for a current thread.
     * <p>
     * Ids come from a monotonically increasing counter cached per thread in a
     * ThreadLocal, so they are never reused within this JVM (unlike
     * Thread.getId(), which the JVM may reuse after a thread dies).
     */
    public static long getCurrentThreadId()
        {
        // import com.tangosol.util.Base;
        // TODO: When we switch to JDK 1.5, this will become trivial:
        //       return Thread.currentThread().getId();
        ThreadLocal tlo = getThreadIdHolder();
        Long        Id  = (Long) tlo.get();
        if (Id == null)
            {
            // first call on this thread: assign the next id and cache it
            long lId = getThreadIdCounter().incrementAndGet();
            tlo.set(Base.makeLong(lId));
            return lId;
            }
        else
            {
            return Id.longValue();
            }
        }
    // Accessor for the property "EffectiveTime"
    /**
     * Getter for property EffectiveTime.<p>
     * Cluster time that the Lease was locked at (milliseconds).
     */
    public long getEffectiveTime()
        {
        return __m_EffectiveTime;
        }
    // Accessor for the property "ExpirationTime"
    /**
     * Getter for property ExpirationTime.<p>
     * Cluster time that the Lease expires at (milliseconds).
     */
    public long getExpirationTime()
        {
        return __m_ExpirationTime;
        }
    // Accessor for the property "HolderId"
    /**
     * Getter for property HolderId.<p>
     * Member id of the holder for this Lease. Value of 0 indicates that
     * currently there is no holder for this Lease.
     */
    public int getHolderId()
        {
        return __m_HolderId;
        }
    // Accessor for the property "HolderThreadId"
    /**
     * Getter for property HolderThreadId.<p>
     * Unique id of the Thread which holds a lock for this Lease. This property
     * has meaning only if the following holds true:
     * <pre><code>
     *     getHolderId() == getService().getThisMember().getId()
     * </code></pre>
     */
    public long getHolderThreadId()
        {
        return __m_HolderThreadId;
        }
    // Accessor for the property "IssuerId"
    /**
     * Getter for property IssuerId.<p>
     * Member id of the issuer (registrar) for this Lease. Value of 0 indicates
     * that currently there is no issuer for this Lease.
     */
    public int getIssuerId()
        {
        return __m_IssuerId;
        }
    // Accessor for the property "LeaseVersion"
    /**
     * Getter for property LeaseVersion.<p>
     * The version of the Lease. It is intended to be used to resolve
     * simultaneous conflicting requests.
     *
     * The value of LeaseVersion is in a range of 0..255, where value of zero
     * represents a not existing lease and value of one represents a newly
     * inserted lease.
     */
    public int getLeaseVersion()
        {
        return __m_LeaseVersion;
        }
    // Accessor for the property "ResourceKey"
    /**
     * Getter for property ResourceKey.<p>
     * Key for the resource represented by this Lease. This property is set
     * during initialization only.
     *
     * @see #instantiate()
     */
    public Object getResourceKey()
        {
        return __m_ResourceKey;
        }
    // Accessor for the property "ResourceSize"
    /**
     * Getter for property ResourceSize.<p>
     * The size of the (serialized) resource represented by this Lease in bytes.
     * It is intended to be used by the cache implementations that have
     * automatic purge strategies that are based on the resource "weight".
     * This property is calculated asynchronously and is not guaranteed to carry
     * the precise value at all times. The value of -1 indicates that the
     * resource has not yet been deserialized.
     */
    public int getResourceSize()
        {
        return __m_ResourceSize;
        }
    // Accessor for the property "ResourceVersion"
    /**
     * Getter for property ResourceVersion.<p>
     * The version of the resource represented by this Lease. It is intended to
     * be used in the optimistic scenarios that do not "lock" prior to the
     * resource updates, but instead discard the "outdated" update requests.
     *
     * The value of ResourceVersion is in a range of 0..255, where value of zero
     * represents a not existing resource and value of one represents a newly
     * inserted resource.
     */
    public int getResourceVersion()
        {
        return __m_ResourceVersion;
        }
    // Accessor for the property "Service"
    /**
     * Getter for property Service.<p>
     * Service object handling this Lease. This property is set during
     * initialization only.
     *
     * @see #instantiateLease()
     */
    public com.tangosol.coherence.component.util.daemon.queueProcessor.service.Grid getService()
        {
        return __m_Service;
        }
    // Accessor for the property "Status"
    /**
     * Getter for property Status.<p>
     * Calculated property that returns this Lease status. The return value is
     * one of:
     * <ul>
     * <li> LEASE_UNISSUED - a request for a lease issue has not been confirmed
     * yet or the issuer is gone
     * <li> LEASE_AVAILABLE - the lease is known to be available
     * <li> LEASE_LOCKED - the lease is known to be held by this service member
     * <li> LEASE_DIRTY - the lease is known to be held by another service
     * member
     * </ul>
     */
    public int getStatus()
        {
        // import Component.Util.Daemon.QueueProcessor.Service.Grid as com.tangosol.coherence.component.util.daemon.queueProcessor.service.Grid;
        // import Component.Net.MemberSet;
        // Note: a locked lease may not be "unissued"!
        com.tangosol.coherence.component.util.daemon.queueProcessor.service.Grid service   = getService();
        MemberSet setMember = service.getServiceMemberSet();
        int     nThisId    = service.getThisMember().getId();
        int     nHolderId  = getHolderId();
        // the lease is free if there is no holder or the holder has left the cluster
        boolean fAvailable = nHolderId == 0 || setMember.getMember(nHolderId) == null;
        if (!fAvailable)
            {
            // check the expiration time being more pessimistic
            // on the holder side (compensating for time difference)
            long lExpirationTime = getExpirationTime();
            if (nThisId == nHolderId)
                {
                // 10 == service.getCluster().getClusterService().getTimestampMaxVariance();
                lExpirationTime -= 10;
                }
            fAvailable = lExpirationTime <= service.getClusterTime();
            }
        if (fAvailable)
            {
            // an available lease is UNISSUED when its issuer is unknown or gone
            int nIssuerId = getIssuerId();
            return nIssuerId == 0 || setMember.getMember(nIssuerId) == null ?
                LEASE_UNISSUED : LEASE_AVAILABLE;
            }
        else
            {
            // since the Lease is thread agnostic, the further decision
            // is made at the level where the calling thread is known
            // (see #ReplicatedCache.getThreadStatus(Lease))
            return nHolderId == nThisId ?
                LEASE_LOCKED : LEASE_DIRTY;
            }
        }
    // Accessor for the property "ThreadIdCounter"
    /**
     * Getter for property ThreadIdCounter.<p>
     * Atomic counter used to assign unique thread identifiers
     * (installed by the static initializer).
     */
    private static java.util.concurrent.atomic.AtomicLong getThreadIdCounter()
        {
        return __s_ThreadIdCounter;
        }
    // Accessor for the property "ThreadIdHolder"
    /**
     * Getter for property ThreadIdHolder.<p>
     * ThreadLocal object holding unique thread identifiers
     * (installed by __initStatic()).
     */
    private static ThreadLocal getThreadIdHolder()
        {
        return __s_ThreadIdHolder;
        }
    /**
     * Helper method to increment the version.
     * Wraps from 255 back to 2, since 0 ("not existing") and 1 ("newly
     * inserted") are reserved values of the LeaseVersion property.
     */
    public synchronized void incrementLeaseVersion()
        {
        int nVersion = getLeaseVersion();
        if (++nVersion > 255)
            {
            // skip the reserved values 0 and 1
            nVersion = 2;
            }
        setLeaseVersion(nVersion);
        }
    /**
     * Helper method to increment the version.
     * Wraps from 255 back to 2, since 0 ("not existing") and 1 ("newly
     * inserted") are reserved values of the ResourceVersion property.
     */
    public synchronized void incrementResourceVersion()
        {
        int nVersion = getResourceVersion();
        if (++nVersion > 255)
            {
            // skip the reserved values 0 and 1
            nVersion = 2;
            }
        setResourceVersion(nVersion);
        }
    /**
     * Instantiate a new Lease for the specified cache index, resource key and
     * service. This is the factory through which the immutable identity
     * properties (CacheIndex, ResourceKey, Service) are assigned.
     */
    public static Lease instantiate(int iCache, Object oKey, com.tangosol.coherence.component.util.daemon.queueProcessor.service.Grid service)
        {
        Lease lease = new Lease();
        lease.setCacheIndex(iCache);
        lease.setResourceKey(oKey);
        lease.setService(service);
        return lease;
        }
    /**
     * Deserialize the lease state from the stream.
     * Wire layout: issuer id and holder id as unsigned shorts, then the
     * holder-thread id and the effective/expiration times as packed longs,
     * then one unsigned short packing LeaseVersion (low byte) and
     * ResourceVersion (high byte).
     */
    public void read(java.io.DataInput stream)
            throws java.io.IOException
        {
        // import com.tangosol.util.ExternalizableHelper;
        // LeaseMessage is responsible for CacheIndex and ResourceKey deserialization
        setIssuerId(stream.readUnsignedShort());
        setHolderId(stream.readUnsignedShort());
        setHolderThreadId(ExternalizableHelper.readLong(stream));
        setEffectiveTime(ExternalizableHelper.readLong(stream));
        setExpirationTime(ExternalizableHelper.readLong(stream));
        int nVersion = stream.readUnsignedShort();
        setLeaseVersion(nVersion & 0xFF);
        setResourceVersion((nVersion & 0xFF00) >>> 8);
        }
    // Accessor for the property "CacheIndex"
    /**
     * Setter for property CacheIndex.<p>
     * The index of the cache that this Lease belongs to. This index is used by
     * the cache service to get to an appropriate cache [handler].
     */
    public void setCacheIndex(int iCache)
        {
        __m_CacheIndex = iCache;
        }
    // Accessor for the property "ClassLoader"
    /**
     * Setter for property ClassLoader.<p>
     * The ClassLoader that the corresponding resource is known to be associated
     * with.
     */
    public void setClassLoader(ClassLoader loader)
        {
        __m_ClassLoader = loader;
        }
    // Accessor for the property "EffectiveTime"
    /**
     * Setter for property EffectiveTime.<p>
     * Cluster time that the Lease was locked at.
     */
    public void setEffectiveTime(long lDatetime)
        {
        __m_EffectiveTime = lDatetime;
        }
    // Accessor for the property "ExpirationTime"
    /**
     * Setter for property ExpirationTime.<p>
     * Cluster time that the Lease expires at.
     */
    public void setExpirationTime(long lDatetime)
        {
        __m_ExpirationTime = lDatetime;
        }
    // Accessor for the property "HolderId"
    /**
     * Setter for property HolderId.<p>
     * Member id of the holder for this Lease. Value of 0 indicates that
     * currently there is no holder for this Lease.
     */
    public void setHolderId(int nId)
        {
        __m_HolderId = nId;
        }
    // Accessor for the property "HolderThreadId"
    /**
     * Setter for property HolderThreadId.<p>
     * Unique id of the Thread which holds a lock for this Lease. This property
     * has meaning only if the following holds true:
     * <pre><code>
     *     getHolderId() == getService().getThisMember().getId()
     * </code></pre>
     */
    public void setHolderThreadId(long lThreadId)
        {
        __m_HolderThreadId = lThreadId;
        }
    // Accessor for the property "IssuerId"
    /**
     * Setter for property IssuerId.<p>
     * Member id of the issuer (registrar) for this Lease. Value of 0 indicates
     * that currently there is no issuer for this Lease.
     */
    public void setIssuerId(int nId)
        {
        __m_IssuerId = nId;
        }
    // Accessor for the property "LeaseVersion"
    /**
     * Setter for property LeaseVersion.<p>
     * The version of the Lease. It is intended to be used to resolve
     * simultaneous conflicting requests.
     *
     * The value of LeaseVersion is in a range of 0..255, where value of zero
     * represents a not existing lease and value of one represents a newly
     * inserted lease.
     */
    protected void setLeaseVersion(int nVersion)
        {
        __m_LeaseVersion = nVersion;
        }
    // Accessor for the property "ResourceKey"
    /**
     * Setter for property ResourceKey.<p>
     * Key for the resource represented by this Lease. This property is set
     * during initialization only.
     *
     * @see #instantiate()
     */
    protected void setResourceKey(Object oKey)
        {
        __m_ResourceKey = oKey;
        }
    // Accessor for the property "ResourceSize"
    /**
     * Setter for property ResourceSize.<p>
     * The size of the (serialized) resource represented by this Lease in bytes.
     * It is intended to be used by the cache implementations that have
     * automatic purge strategies that are based on the resource "weight".
     * This property is calculated asynchronously and is not guaranteed to carry
     * the precise value at all times. The value of -1 indicates that the
     * resource has not yet been deserialized.
     */
    public void setResourceSize(int cbSize)
        {
        __m_ResourceSize = cbSize;
        }
    // Accessor for the property "ResourceVersion"
    /**
     * Setter for property ResourceVersion.<p>
     * The version of the resource represented by this Lease. It is intended to
     * be used in the optimistic scenarios that do not "lock" prior to the
     * resource updates, but instead discard the "outdated" update requests.
     *
     * The value of ResourceVersion is in a range of 0..255, where value of zero
     * represents a not existing resource and value of one represents a newly
     * inserted resource.
     */
    protected void setResourceVersion(int nVersion)
        {
        __m_ResourceVersion = nVersion;
        }
    // Accessor for the property "Service"
    /**
     * Setter for property Service.<p>
     * Service object handling this Lease. This property is set during
     * initialization only.
     *
     * @see #instantiateLease()
     */
    protected void setService(com.tangosol.coherence.component.util.daemon.queueProcessor.service.Grid service)
        {
        __m_Service = service;
        }
    // Accessor for the property "ThreadIdCounter"
    /**
     * Setter for property ThreadIdCounter.<p>
     * Atomic counter used to assign unique thread identifiers.
     */
    private static void setThreadIdCounter(java.util.concurrent.atomic.AtomicLong counter)
        {
        __s_ThreadIdCounter = counter;
        }
    // Accessor for the property "ThreadIdHolder"
    /**
     * Setter for property ThreadIdHolder.<p>
     * ThreadLocal object holding unique thread identifiers.
     */
    private static void setThreadIdHolder(ThreadLocal tlo)
        {
        __s_ThreadIdHolder = tlo;
        }
    // Declared at the super level
    /**
     * Returns a human-readable description of this Lease: resource key,
     * cache name, size, lease/resource versions, issuer, holder, status,
     * and -- depending on the status -- the lock/expiration timing details.
     */
    public String toString()
        {
        // import Component.Util.Daemon.QueueProcessor.Service.Grid as com.tangosol.coherence.component.util.daemon.queueProcessor.service.Grid;
        // import java.util.Date;
        StringBuffer sb = new StringBuffer();
        com.tangosol.coherence.component.util.daemon.queueProcessor.service.Grid service   = getService();
        int     nIssuerId = getIssuerId();
        int     nHolderId = getHolderId();
        long    lThreadId = getHolderThreadId();
        int     cbSize    = getResourceSize();
        int     nStatus   = getStatus();
        sb.append("Lease: ")
          .append(getResourceKey())
          .append(" (Cache=")
          .append(formatCacheName(getCacheIndex(), service))
          .append(", Size=")
          .append(cbSize == -1 ? "Unknown" : String.valueOf(cbSize))
          .append(", Version=")
          .append(getLeaseVersion())
          .append('/')
          .append(getResourceVersion())
          .append(", IssuerId=")
          .append(nIssuerId)
          .append(", HolderId=")
          .append(nHolderId)
          .append(", Status=")
          .append(formatStatusName(nStatus));
        if (nStatus == LEASE_LOCKED || nStatus == LEASE_DIRTY)
            {
            // the thread id is only meaningful when this member is the holder
            if (nHolderId == service.getThisMember().getId())
                {
                sb.append(", Held by threadId=")
                  .append(lThreadId);
                }
            sb.append(", Locked at ")
              .append(new Date(getEffectiveTime()))
              .append(", Expires in ")
              .append(getExpirationTime() - service.getClusterTime())
              .append(" millis");
            }
        else
            {
            if (nHolderId == 0)
                {
                sb.append(", Last locked at ")
                  .append(new Date(getEffectiveTime()));
                }
            else
                {
                sb.append(", Last held by member ")
                  .append(nHolderId)
                  .append(" from ")
                  .append(new Date(getEffectiveTime()))
                  .append(" to ")
                  .append(new Date(getExpirationTime()));
                }
            }
        sb.append(')');
        return sb.toString();
        }
/**
 * Unlock the lease.
 *
 * Clears the holder member and thread ids, and sets the expiration time to
 * the current cluster time so that the lease immediately reads as expired/
 * available. Synchronized so the state transition is atomic with respect to
 * other lease operations on this object.
 */
public synchronized void unlock()
{
    setHolderId(0);
    setHolderThreadId(0L);
    setExpirationTime(getService().getClusterTime());
    // wake up any threads blocked waiting for this lease to become available
    notifyAll();
}
/**
 * Validate the lease.
 *
 * Releases the lock if the holding member has left the service or the lease
 * term has expired (measured against cluster time), and clears the issuer
 * if the issuing member is gone. Synchronized so validation is atomic with
 * respect to lock/unlock operations on this lease.
 */
public synchronized void validate()
{
    MemberSet setMember = getService().getServiceMemberSet();

    // check the holder: release the lock if the holder is gone or the lease
    // has expired. (Previously these were two independent checks that could
    // invoke unlock() twice for the same lease; one combined check suffices
    // and produces the same end state.)
    int nHolderId = getHolderId();
    if (nHolderId != 0
            && (setMember.getMember(nHolderId) == null
             || getExpirationTime() <= getService().getClusterTime()))
        {
        unlock();
        }

    // check the issuer: clear it if the issuing member has left the service
    int nIssuerId = getIssuerId();
    if (nIssuerId != 0 && setMember.getMember(nIssuerId) == null)
        {
        setIssuerId(0);
        notifyAll();
        }
}
/**
 * Serializes this Lease to the given stream. The wire format is, in order:
 * issuer id (short), holder id (short), holder thread id (packed long),
 * effective time (packed long), expiration time (packed long), followed by
 * a single short that carries the lease version in its low byte and the
 * resource version in its high byte. Must be kept in sync with the
 * corresponding deserialization logic.
 *
 * @param stream  the stream to write this Lease to
 *
 * @throws java.io.IOException on an I/O failure
 */
public void write(java.io.DataOutput stream)
        throws java.io.IOException
{
    // import com.tangosol.util.ExternalizableHelper;

    // LeaseMessage is responsible for CacheIndex and ResourceKey serialization
    stream.writeShort(getIssuerId());
    stream.writeShort(getHolderId());
    ExternalizableHelper.writeLong(stream, getHolderThreadId());
    ExternalizableHelper.writeLong(stream, getEffectiveTime());
    ExternalizableHelper.writeLong(stream, getExpirationTime());
    // pack both versions (each constrained to 0..255) into one 16-bit value
    stream.writeShort(getLeaseVersion() | (getResourceVersion() << 8));
}
}
|
googleapis/google-api-java-client-services | 34,966 | clients/google-api-services-admin/datatransfer_v1/2.0.0/com/google/api/services/datatransfer/DataTransfer.java | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.datatransfer;
/**
* Service definition for DataTransfer (datatransfer_v1).
*
* <p>
 * Admin SDK lets administrators of enterprise domains view and manage resources such as users and groups. It also provides audit and usage reports of the domain.
* </p>
*
* <p>
* For more information about this service, see the
* <a href="https://developers.google.com/workspace/admin/" target="_blank">API Documentation</a>
* </p>
*
* <p>
* This service uses {@link DataTransferRequestInitializer} to initialize global parameters via its
* {@link Builder}.
* </p>
*
* @since 1.3
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public class DataTransfer extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient {
  // Note: Leave this static initializer at the top of the file.
  static {
    // Fail fast at class-load time if the runtime google-api-client is too
    // old: accept 1.32+, 1.31.x with bugfix >= 1, or any 2.x release.
    com.google.api.client.util.Preconditions.checkState(
        (com.google.api.client.googleapis.GoogleUtils.MAJOR_VERSION == 1 &&
        (com.google.api.client.googleapis.GoogleUtils.MINOR_VERSION >= 32 ||
        (com.google.api.client.googleapis.GoogleUtils.MINOR_VERSION == 31 &&
        com.google.api.client.googleapis.GoogleUtils.BUGFIX_VERSION >= 1))) ||
        com.google.api.client.googleapis.GoogleUtils.MAJOR_VERSION >= 2,
        "You are currently running with version %s of google-api-client. " +
        "You need at least version 1.31.1 of google-api-client to run version " +
        "2.0.0 of the Admin SDK API library.", com.google.api.client.googleapis.GoogleUtils.VERSION);
  }
  /**
   * The default encoded root URL of the service. This is determined when the library is generated
   * and normally should not be changed.
   *
   * @since 1.7
   */
  public static final String DEFAULT_ROOT_URL = "https://admin.googleapis.com/";

  /**
   * The default encoded mTLS root URL of the service. This is determined when the library is generated
   * and normally should not be changed.
   *
   * @since 1.31
   */
  public static final String DEFAULT_MTLS_ROOT_URL = "https://admin.mtls.googleapis.com/";

  /**
   * The default encoded service path of the service. This is determined when the library is
   * generated and normally should not be changed.
   *
   * @since 1.7
   */
  public static final String DEFAULT_SERVICE_PATH = "";

  /**
   * The default encoded batch path of the service. This is determined when the library is
   * generated and normally should not be changed.
   *
   * @since 1.23
   */
  public static final String DEFAULT_BATCH_PATH = "batch";

  /**
   * The default encoded base URL of the service (root URL plus service path). This is determined
   * when the library is generated and normally should not be changed.
   */
  public static final String DEFAULT_BASE_URL = DEFAULT_ROOT_URL + DEFAULT_SERVICE_PATH;
  /**
   * Constructor.
   *
   * <p>
   * Use {@link Builder} if you need to specify any of the optional parameters.
   * </p>
   *
   * @param transport HTTP transport, which should normally be:
   *        <ul>
   *        <li>Google App Engine:
   *        {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li>
   *        <li>Android: {@code newCompatibleTransport} from
   *        {@code com.google.api.client.extensions.android.http.AndroidHttp}</li>
   *        <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()}
   *        </li>
   *        </ul>
   * @param jsonFactory JSON factory, which may be:
   *        <ul>
   *        <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li>
   *        <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li>
   *        <li>Android Honeycomb or higher:
   *        {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li>
   *        </ul>
   * @param httpRequestInitializer HTTP request initializer or {@code null} for none
   * @since 1.7
   */
  public DataTransfer(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory,
      com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
    // Delegate to the Builder-based constructor so that defaults (root URL,
    // service path, batch path) are chosen in exactly one place.
    this(new Builder(transport, jsonFactory, httpRequestInitializer));
  }

  /**
   * @param builder builder
   */
  DataTransfer(Builder builder) {
    super(builder);
  }

  @Override
  protected void initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest<?> httpClientRequest) throws java.io.IOException {
    // Invoked for every request created by the collection accessors below;
    // applies any request initializer configured on this client.
    super.initialize(httpClientRequest);
  }
  /**
   * An accessor for creating requests from the Applications collection.
   *
   * <p>The typical use is:</p>
   * <pre>
   *   {@code DataTransfer admin = new DataTransfer(...);}
   *   {@code DataTransfer.Applications.List request = admin.applications().list(parameters ...)}
   * </pre>
   *
   * @return the resource collection
   */
  public Applications applications() {
    // A new lightweight accessor per call; the requests it creates are
    // initialized by this client (see initialize above).
    return new Applications();
  }
  /**
   * The "applications" collection of methods.
   */
  public class Applications {

    /**
     * Retrieves information about an application for the given application ID.
     *
     * Create a request for the method "applications.get".
     *
     * This request holds the parameters needed by the admin server. After setting any optional
     * parameters, call the {@link Get#execute()} method to invoke the remote operation.
     *
     * @param applicationId ID of the application resource to be retrieved.
     * @return the request
     */
    public Get get(java.lang.Long applicationId) throws java.io.IOException {
      Get result = new Get(applicationId);
      // Apply client-wide request initialization before handing out the request.
      initialize(result);
      return result;
    }

    public class Get extends DataTransferRequest<com.google.api.services.datatransfer.model.Application> {

      private static final String REST_PATH = "admin/datatransfer/v1/applications/{applicationId}";

      /**
       * Retrieves information about an application for the given application ID.
       *
       * Create a request for the method "applications.get".
       *
       * This request holds the parameters needed by the admin server. After setting any optional
       * parameters, call the {@link Get#execute()} method to invoke the remote operation. <p> {@link
       * Get#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must be
       * called to initialize this instance immediately after invoking the constructor. </p>
       *
       * @param applicationId ID of the application resource to be retrieved.
       * @since 1.13
       */
      protected Get(java.lang.Long applicationId) {
        super(DataTransfer.this, "GET", REST_PATH, null, com.google.api.services.datatransfer.model.Application.class);
        this.applicationId = com.google.api.client.util.Preconditions.checkNotNull(applicationId, "Required parameter applicationId must be specified.");
      }

      @Override
      public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException {
        return super.executeUsingHead();
      }

      @Override
      public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException {
        return super.buildHttpRequestUsingHead();
      }

      // Covariant overrides of the standard query parameters so that call
      // chaining preserves the concrete Get type.
      @Override
      public Get set$Xgafv(java.lang.String $Xgafv) {
        return (Get) super.set$Xgafv($Xgafv);
      }

      @Override
      public Get setAccessToken(java.lang.String accessToken) {
        return (Get) super.setAccessToken(accessToken);
      }

      @Override
      public Get setAlt(java.lang.String alt) {
        return (Get) super.setAlt(alt);
      }

      @Override
      public Get setCallback(java.lang.String callback) {
        return (Get) super.setCallback(callback);
      }

      @Override
      public Get setFields(java.lang.String fields) {
        return (Get) super.setFields(fields);
      }

      @Override
      public Get setKey(java.lang.String key) {
        return (Get) super.setKey(key);
      }

      @Override
      public Get setOauthToken(java.lang.String oauthToken) {
        return (Get) super.setOauthToken(oauthToken);
      }

      @Override
      public Get setPrettyPrint(java.lang.Boolean prettyPrint) {
        return (Get) super.setPrettyPrint(prettyPrint);
      }

      @Override
      public Get setQuotaUser(java.lang.String quotaUser) {
        return (Get) super.setQuotaUser(quotaUser);
      }

      @Override
      public Get setUploadType(java.lang.String uploadType) {
        return (Get) super.setUploadType(uploadType);
      }

      @Override
      public Get setUploadProtocol(java.lang.String uploadProtocol) {
        return (Get) super.setUploadProtocol(uploadProtocol);
      }

      /** ID of the application resource to be retrieved. */
      @com.google.api.client.util.Key
      private java.lang.Long applicationId;

      /** ID of the application resource to be retrieved.
       */
      public java.lang.Long getApplicationId() {
        return applicationId;
      }

      /** ID of the application resource to be retrieved. */
      public Get setApplicationId(java.lang.Long applicationId) {
        this.applicationId = applicationId;
        return this;
      }

      @Override
      public Get set(String parameterName, Object value) {
        return (Get) super.set(parameterName, value);
      }
    }

    /**
     * Lists the applications available for data transfer for a customer.
     *
     * Create a request for the method "applications.list".
     *
     * This request holds the parameters needed by the admin server. After setting any optional
     * parameters, call the {@link List#execute()} method to invoke the remote operation.
     *
     * @return the request
     */
    public List list() throws java.io.IOException {
      List result = new List();
      // Apply client-wide request initialization before handing out the request.
      initialize(result);
      return result;
    }

    public class List extends DataTransferRequest<com.google.api.services.datatransfer.model.ApplicationsListResponse> {

      private static final String REST_PATH = "admin/datatransfer/v1/applications";

      /**
       * Lists the applications available for data transfer for a customer.
       *
       * Create a request for the method "applications.list".
       *
       * This request holds the parameters needed by the admin server. After setting any optional
       * parameters, call the {@link List#execute()} method to invoke the remote operation. <p> {@link
       * List#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must be
       * called to initialize this instance immediately after invoking the constructor. </p>
       *
       * @since 1.13
       */
      protected List() {
        super(DataTransfer.this, "GET", REST_PATH, null, com.google.api.services.datatransfer.model.ApplicationsListResponse.class);
      }

      @Override
      public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException {
        return super.executeUsingHead();
      }

      @Override
      public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException {
        return super.buildHttpRequestUsingHead();
      }

      // Covariant overrides of the standard query parameters so that call
      // chaining preserves the concrete List type.
      @Override
      public List set$Xgafv(java.lang.String $Xgafv) {
        return (List) super.set$Xgafv($Xgafv);
      }

      @Override
      public List setAccessToken(java.lang.String accessToken) {
        return (List) super.setAccessToken(accessToken);
      }

      @Override
      public List setAlt(java.lang.String alt) {
        return (List) super.setAlt(alt);
      }

      @Override
      public List setCallback(java.lang.String callback) {
        return (List) super.setCallback(callback);
      }

      @Override
      public List setFields(java.lang.String fields) {
        return (List) super.setFields(fields);
      }

      @Override
      public List setKey(java.lang.String key) {
        return (List) super.setKey(key);
      }

      @Override
      public List setOauthToken(java.lang.String oauthToken) {
        return (List) super.setOauthToken(oauthToken);
      }

      @Override
      public List setPrettyPrint(java.lang.Boolean prettyPrint) {
        return (List) super.setPrettyPrint(prettyPrint);
      }

      @Override
      public List setQuotaUser(java.lang.String quotaUser) {
        return (List) super.setQuotaUser(quotaUser);
      }

      @Override
      public List setUploadType(java.lang.String uploadType) {
        return (List) super.setUploadType(uploadType);
      }

      @Override
      public List setUploadProtocol(java.lang.String uploadProtocol) {
        return (List) super.setUploadProtocol(uploadProtocol);
      }

      /** Immutable ID of the Google Workspace account. */
      @com.google.api.client.util.Key
      private java.lang.String customerId;

      /** Immutable ID of the Google Workspace account.
       */
      public java.lang.String getCustomerId() {
        return customerId;
      }

      /** Immutable ID of the Google Workspace account. */
      public List setCustomerId(java.lang.String customerId) {
        this.customerId = customerId;
        return this;
      }

      /** Maximum number of results to return. Default is 100. */
      @com.google.api.client.util.Key
      private java.lang.Integer maxResults;

      /** Maximum number of results to return. Default is 100.
       [minimum: 1] [maximum: 500]
       */
      public java.lang.Integer getMaxResults() {
        return maxResults;
      }

      /** Maximum number of results to return. Default is 100. */
      public List setMaxResults(java.lang.Integer maxResults) {
        this.maxResults = maxResults;
        return this;
      }

      /** Token to specify next page in the list. */
      @com.google.api.client.util.Key
      private java.lang.String pageToken;

      /** Token to specify next page in the list.
       */
      public java.lang.String getPageToken() {
        return pageToken;
      }

      /** Token to specify next page in the list. */
      public List setPageToken(java.lang.String pageToken) {
        this.pageToken = pageToken;
        return this;
      }

      @Override
      public List set(String parameterName, Object value) {
        return (List) super.set(parameterName, value);
      }
    }
  }
  /**
   * An accessor for creating requests from the Transfers collection.
   *
   * <p>The typical use is:</p>
   * <pre>
   *   {@code DataTransfer admin = new DataTransfer(...);}
   *   {@code DataTransfer.Transfers.List request = admin.transfers().list(parameters ...)}
   * </pre>
   *
   * @return the resource collection
   */
  public Transfers transfers() {
    // A new lightweight accessor per call; the requests it creates are
    // initialized by this client (see initialize above).
    return new Transfers();
  }
  /**
   * The "transfers" collection of methods.
   */
  public class Transfers {

    /**
     * Retrieves a data transfer request by its resource ID.
     *
     * Create a request for the method "transfers.get".
     *
     * This request holds the parameters needed by the admin server. After setting any optional
     * parameters, call the {@link Get#execute()} method to invoke the remote operation.
     *
     * @param dataTransferId ID of the resource to be retrieved. This is returned in the response from the insert method.
     * @return the request
     */
    public Get get(java.lang.String dataTransferId) throws java.io.IOException {
      Get result = new Get(dataTransferId);
      // Apply client-wide request initialization before handing out the request.
      initialize(result);
      return result;
    }

    public class Get extends DataTransferRequest<com.google.api.services.datatransfer.model.DataTransfer> {

      private static final String REST_PATH = "admin/datatransfer/v1/transfers/{dataTransferId}";

      /**
       * Retrieves a data transfer request by its resource ID.
       *
       * Create a request for the method "transfers.get".
       *
       * This request holds the parameters needed by the admin server. After setting any optional
       * parameters, call the {@link Get#execute()} method to invoke the remote operation. <p> {@link
       * Get#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must be
       * called to initialize this instance immediately after invoking the constructor. </p>
       *
       * @param dataTransferId ID of the resource to be retrieved. This is returned in the response from the insert method.
       * @since 1.13
       */
      protected Get(java.lang.String dataTransferId) {
        super(DataTransfer.this, "GET", REST_PATH, null, com.google.api.services.datatransfer.model.DataTransfer.class);
        this.dataTransferId = com.google.api.client.util.Preconditions.checkNotNull(dataTransferId, "Required parameter dataTransferId must be specified.");
      }

      @Override
      public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException {
        return super.executeUsingHead();
      }

      @Override
      public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException {
        return super.buildHttpRequestUsingHead();
      }

      // Covariant overrides of the standard query parameters so that call
      // chaining preserves the concrete Get type.
      @Override
      public Get set$Xgafv(java.lang.String $Xgafv) {
        return (Get) super.set$Xgafv($Xgafv);
      }

      @Override
      public Get setAccessToken(java.lang.String accessToken) {
        return (Get) super.setAccessToken(accessToken);
      }

      @Override
      public Get setAlt(java.lang.String alt) {
        return (Get) super.setAlt(alt);
      }

      @Override
      public Get setCallback(java.lang.String callback) {
        return (Get) super.setCallback(callback);
      }

      @Override
      public Get setFields(java.lang.String fields) {
        return (Get) super.setFields(fields);
      }

      @Override
      public Get setKey(java.lang.String key) {
        return (Get) super.setKey(key);
      }

      @Override
      public Get setOauthToken(java.lang.String oauthToken) {
        return (Get) super.setOauthToken(oauthToken);
      }

      @Override
      public Get setPrettyPrint(java.lang.Boolean prettyPrint) {
        return (Get) super.setPrettyPrint(prettyPrint);
      }

      @Override
      public Get setQuotaUser(java.lang.String quotaUser) {
        return (Get) super.setQuotaUser(quotaUser);
      }

      @Override
      public Get setUploadType(java.lang.String uploadType) {
        return (Get) super.setUploadType(uploadType);
      }

      @Override
      public Get setUploadProtocol(java.lang.String uploadProtocol) {
        return (Get) super.setUploadProtocol(uploadProtocol);
      }

      /**
       * ID of the resource to be retrieved. This is returned in the response from the insert
       * method.
       */
      @com.google.api.client.util.Key
      private java.lang.String dataTransferId;

      /** ID of the resource to be retrieved. This is returned in the response from the insert method.
       */
      public java.lang.String getDataTransferId() {
        return dataTransferId;
      }

      /**
       * ID of the resource to be retrieved. This is returned in the response from the insert
       * method.
       */
      public Get setDataTransferId(java.lang.String dataTransferId) {
        this.dataTransferId = dataTransferId;
        return this;
      }

      @Override
      public Get set(String parameterName, Object value) {
        return (Get) super.set(parameterName, value);
      }
    }

    /**
     * Inserts a data transfer request. See the [Transfer
     * parameters](https://developers.google.com/workspace/admin/data-transfer/v1/parameters) reference
     * for specific application requirements.
     *
     * Create a request for the method "transfers.insert".
     *
     * This request holds the parameters needed by the admin server. After setting any optional
     * parameters, call the {@link Insert#execute()} method to invoke the remote operation.
     *
     * @param content the {@link com.google.api.services.datatransfer.model.DataTransfer}
     * @return the request
     */
    public Insert insert(com.google.api.services.datatransfer.model.DataTransfer content) throws java.io.IOException {
      Insert result = new Insert(content);
      // Apply client-wide request initialization before handing out the request.
      initialize(result);
      return result;
    }

    public class Insert extends DataTransferRequest<com.google.api.services.datatransfer.model.DataTransfer> {

      private static final String REST_PATH = "admin/datatransfer/v1/transfers";

      /**
       * Inserts a data transfer request. See the [Transfer
       * parameters](https://developers.google.com/workspace/admin/data-transfer/v1/parameters)
       * reference for specific application requirements.
       *
       * Create a request for the method "transfers.insert".
       *
       * This request holds the parameters needed by the admin server. After setting any optional
       * parameters, call the {@link Insert#execute()} method to invoke the remote operation. <p> {@link
       * Insert#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must
       * be called to initialize this instance immediately after invoking the constructor. </p>
       *
       * @param content the {@link com.google.api.services.datatransfer.model.DataTransfer}
       * @since 1.13
       */
      protected Insert(com.google.api.services.datatransfer.model.DataTransfer content) {
        super(DataTransfer.this, "POST", REST_PATH, content, com.google.api.services.datatransfer.model.DataTransfer.class);
      }

      // Covariant overrides of the standard query parameters so that call
      // chaining preserves the concrete Insert type. (No HEAD-request
      // overrides here: HEAD is only meaningful for GET-style requests.)
      @Override
      public Insert set$Xgafv(java.lang.String $Xgafv) {
        return (Insert) super.set$Xgafv($Xgafv);
      }

      @Override
      public Insert setAccessToken(java.lang.String accessToken) {
        return (Insert) super.setAccessToken(accessToken);
      }

      @Override
      public Insert setAlt(java.lang.String alt) {
        return (Insert) super.setAlt(alt);
      }

      @Override
      public Insert setCallback(java.lang.String callback) {
        return (Insert) super.setCallback(callback);
      }

      @Override
      public Insert setFields(java.lang.String fields) {
        return (Insert) super.setFields(fields);
      }

      @Override
      public Insert setKey(java.lang.String key) {
        return (Insert) super.setKey(key);
      }

      @Override
      public Insert setOauthToken(java.lang.String oauthToken) {
        return (Insert) super.setOauthToken(oauthToken);
      }

      @Override
      public Insert setPrettyPrint(java.lang.Boolean prettyPrint) {
        return (Insert) super.setPrettyPrint(prettyPrint);
      }

      @Override
      public Insert setQuotaUser(java.lang.String quotaUser) {
        return (Insert) super.setQuotaUser(quotaUser);
      }

      @Override
      public Insert setUploadType(java.lang.String uploadType) {
        return (Insert) super.setUploadType(uploadType);
      }

      @Override
      public Insert setUploadProtocol(java.lang.String uploadProtocol) {
        return (Insert) super.setUploadProtocol(uploadProtocol);
      }

      @Override
      public Insert set(String parameterName, Object value) {
        return (Insert) super.set(parameterName, value);
      }
    }

    /**
     * Lists the transfers for a customer by source user, destination user, or status.
     *
     * Create a request for the method "transfers.list".
     *
     * This request holds the parameters needed by the admin server. After setting any optional
     * parameters, call the {@link List#execute()} method to invoke the remote operation.
     *
     * @return the request
     */
    public List list() throws java.io.IOException {
      List result = new List();
      // Apply client-wide request initialization before handing out the request.
      initialize(result);
      return result;
    }

    public class List extends DataTransferRequest<com.google.api.services.datatransfer.model.DataTransfersListResponse> {

      private static final String REST_PATH = "admin/datatransfer/v1/transfers";

      /**
       * Lists the transfers for a customer by source user, destination user, or status.
       *
       * Create a request for the method "transfers.list".
       *
       * This request holds the parameters needed by the admin server. After setting any optional
       * parameters, call the {@link List#execute()} method to invoke the remote operation. <p> {@link
       * List#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must be
       * called to initialize this instance immediately after invoking the constructor. </p>
       *
       * @since 1.13
       */
      protected List() {
        super(DataTransfer.this, "GET", REST_PATH, null, com.google.api.services.datatransfer.model.DataTransfersListResponse.class);
      }

      @Override
      public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException {
        return super.executeUsingHead();
      }

      @Override
      public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException {
        return super.buildHttpRequestUsingHead();
      }

      // Covariant overrides of the standard query parameters so that call
      // chaining preserves the concrete List type.
      @Override
      public List set$Xgafv(java.lang.String $Xgafv) {
        return (List) super.set$Xgafv($Xgafv);
      }

      @Override
      public List setAccessToken(java.lang.String accessToken) {
        return (List) super.setAccessToken(accessToken);
      }

      @Override
      public List setAlt(java.lang.String alt) {
        return (List) super.setAlt(alt);
      }

      @Override
      public List setCallback(java.lang.String callback) {
        return (List) super.setCallback(callback);
      }

      @Override
      public List setFields(java.lang.String fields) {
        return (List) super.setFields(fields);
      }

      @Override
      public List setKey(java.lang.String key) {
        return (List) super.setKey(key);
      }

      @Override
      public List setOauthToken(java.lang.String oauthToken) {
        return (List) super.setOauthToken(oauthToken);
      }

      @Override
      public List setPrettyPrint(java.lang.Boolean prettyPrint) {
        return (List) super.setPrettyPrint(prettyPrint);
      }

      @Override
      public List setQuotaUser(java.lang.String quotaUser) {
        return (List) super.setQuotaUser(quotaUser);
      }

      @Override
      public List setUploadType(java.lang.String uploadType) {
        return (List) super.setUploadType(uploadType);
      }

      @Override
      public List setUploadProtocol(java.lang.String uploadProtocol) {
        return (List) super.setUploadProtocol(uploadProtocol);
      }

      /** Immutable ID of the Google Workspace account. */
      @com.google.api.client.util.Key
      private java.lang.String customerId;

      /** Immutable ID of the Google Workspace account.
       */
      public java.lang.String getCustomerId() {
        return customerId;
      }

      /** Immutable ID of the Google Workspace account. */
      public List setCustomerId(java.lang.String customerId) {
        this.customerId = customerId;
        return this;
      }

      /** Maximum number of results to return. Default is 100. */
      @com.google.api.client.util.Key
      private java.lang.Integer maxResults;

      /** Maximum number of results to return. Default is 100.
       [minimum: 1] [maximum: 500]
       */
      public java.lang.Integer getMaxResults() {
        return maxResults;
      }

      /** Maximum number of results to return. Default is 100. */
      public List setMaxResults(java.lang.Integer maxResults) {
        this.maxResults = maxResults;
        return this;
      }

      /** Destination user's profile ID. */
      @com.google.api.client.util.Key
      private java.lang.String newOwnerUserId;

      /** Destination user's profile ID.
       */
      public java.lang.String getNewOwnerUserId() {
        return newOwnerUserId;
      }

      /** Destination user's profile ID. */
      public List setNewOwnerUserId(java.lang.String newOwnerUserId) {
        this.newOwnerUserId = newOwnerUserId;
        return this;
      }

      /** Source user's profile ID. */
      @com.google.api.client.util.Key
      private java.lang.String oldOwnerUserId;

      /** Source user's profile ID.
       */
      public java.lang.String getOldOwnerUserId() {
        return oldOwnerUserId;
      }

      /** Source user's profile ID. */
      public List setOldOwnerUserId(java.lang.String oldOwnerUserId) {
        this.oldOwnerUserId = oldOwnerUserId;
        return this;
      }

      /** Token to specify the next page in the list. */
      @com.google.api.client.util.Key
      private java.lang.String pageToken;

      /** Token to specify the next page in the list.
       */
      public java.lang.String getPageToken() {
        return pageToken;
      }

      /** Token to specify the next page in the list. */
      public List setPageToken(java.lang.String pageToken) {
        this.pageToken = pageToken;
        return this;
      }

      /** Status of the transfer. */
      @com.google.api.client.util.Key
      private java.lang.String status;

      /** Status of the transfer.
       */
      public java.lang.String getStatus() {
        return status;
      }

      /** Status of the transfer. */
      public List setStatus(java.lang.String status) {
        this.status = status;
        return this;
      }

      @Override
      public List set(String parameterName, Object value) {
        return (List) super.set(parameterName, value);
      }
    }
  }
/**
* Builder for {@link DataTransfer}.
*
* <p>
* Implementation is not thread-safe.
* </p>
*
* @since 1.3.0
*/
public static final class Builder extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient.Builder {
private static String chooseEndpoint(com.google.api.client.http.HttpTransport transport) {
// If the GOOGLE_API_USE_MTLS_ENDPOINT environment variable value is "always", use mTLS endpoint.
// If the env variable is "auto", use mTLS endpoint if and only if the transport is mTLS.
// Use the regular endpoint for all other cases.
String useMtlsEndpoint = System.getenv("GOOGLE_API_USE_MTLS_ENDPOINT");
useMtlsEndpoint = useMtlsEndpoint == null ? "auto" : useMtlsEndpoint;
if ("always".equals(useMtlsEndpoint) || ("auto".equals(useMtlsEndpoint) && transport != null && transport.isMtls())) {
return DEFAULT_MTLS_ROOT_URL;
}
return DEFAULT_ROOT_URL;
}
/**
* Returns an instance of a new builder.
*
* @param transport HTTP transport, which should normally be:
* <ul>
* <li>Google App Engine:
* {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li>
* <li>Android: {@code newCompatibleTransport} from
* {@code com.google.api.client.extensions.android.http.AndroidHttp}</li>
* <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()}
* </li>
* </ul>
* @param jsonFactory JSON factory, which may be:
* <ul>
* <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li>
* <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li>
* <li>Android Honeycomb or higher:
* {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li>
* </ul>
* @param httpRequestInitializer HTTP request initializer or {@code null} for none
* @since 1.7
*/
public Builder(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory,
com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
super(
transport,
jsonFactory,
Builder.chooseEndpoint(transport),
DEFAULT_SERVICE_PATH,
httpRequestInitializer,
false);
setBatchPath(DEFAULT_BATCH_PATH);
}
/** Builds a new instance of {@link DataTransfer}. */
@Override
public DataTransfer build() {
return new DataTransfer(this);
}
@Override
public Builder setRootUrl(String rootUrl) {
return (Builder) super.setRootUrl(rootUrl);
}
@Override
public Builder setServicePath(String servicePath) {
return (Builder) super.setServicePath(servicePath);
}
@Override
public Builder setBatchPath(String batchPath) {
return (Builder) super.setBatchPath(batchPath);
}
@Override
public Builder setHttpRequestInitializer(com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
return (Builder) super.setHttpRequestInitializer(httpRequestInitializer);
}
@Override
public Builder setApplicationName(String applicationName) {
return (Builder) super.setApplicationName(applicationName);
}
@Override
public Builder setSuppressPatternChecks(boolean suppressPatternChecks) {
return (Builder) super.setSuppressPatternChecks(suppressPatternChecks);
}
@Override
public Builder setSuppressRequiredParameterChecks(boolean suppressRequiredParameterChecks) {
return (Builder) super.setSuppressRequiredParameterChecks(suppressRequiredParameterChecks);
}
@Override
public Builder setSuppressAllChecks(boolean suppressAllChecks) {
return (Builder) super.setSuppressAllChecks(suppressAllChecks);
}
/**
 * Set the {@link DataTransferRequestInitializer}.
 *
 * <p>Convenience form of {@link #setGoogleClientRequestInitializer}: installs the
 * service-specific request initializer on the underlying client builder.
 *
 * @since 1.12
 */
public Builder setDataTransferRequestInitializer(
    DataTransferRequestInitializer datatransferRequestInitializer) {
  return (Builder) super.setGoogleClientRequestInitializer(datatransferRequestInitializer);
}
// Covariant override: narrows the superclass return type so calls chain as this Builder.
@Override
public Builder setGoogleClientRequestInitializer(
    com.google.api.client.googleapis.services.GoogleClientRequestInitializer googleClientRequestInitializer) {
  return (Builder) super.setGoogleClientRequestInitializer(googleClientRequestInitializer);
}
// Covariant override: narrows the superclass return type so calls chain as this Builder.
@Override
public Builder setUniverseDomain(String universeDomain) {
  return (Builder) super.setUniverseDomain(universeDomain);
}
}
}
|
googleapis/google-cloud-java | 34,881 | java-discoveryengine/proto-google-cloud-discoveryengine-v1beta/src/main/java/com/google/cloud/discoveryengine/v1beta/ListEnginesRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/discoveryengine/v1beta/engine_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.discoveryengine.v1beta;
/**
*
*
* <pre>
* Request message for
* [EngineService.ListEngines][google.cloud.discoveryengine.v1beta.EngineService.ListEngines]
* method.
* </pre>
*
* Protobuf type {@code google.cloud.discoveryengine.v1beta.ListEnginesRequest}
*/
public final class ListEnginesRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1beta.ListEnginesRequest)
    ListEnginesRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use ListEnginesRequest.newBuilder() to construct.
  private ListEnginesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default-instance constructor: all string fields start as the empty string
  // (proto3 default values).
  private ListEnginesRequest() {
    parent_ = "";
    pageToken_ = "";
    filter_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListEnginesRequest();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.discoveryengine.v1beta.EngineServiceProto
        .internal_static_google_cloud_discoveryengine_v1beta_ListEnginesRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.discoveryengine.v1beta.EngineServiceProto
        .internal_static_google_cloud_discoveryengine_v1beta_ListEnginesRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.discoveryengine.v1beta.ListEnginesRequest.class,
            com.google.cloud.discoveryengine.v1beta.ListEnginesRequest.Builder.class);
  }

  public static final int PARENT_FIELD_NUMBER = 1;

  // Holds either a String or a ByteString; decoded lazily and cached on first
  // String access (standard generated-code pattern for proto string fields).
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";

  /**
   *
   *
   * <pre>
   * Required. The parent resource name, such as
   * `projects/{project}/locations/{location}/collections/{collection_id}`.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // First access after parsing: decode the UTF-8 ByteString and cache the String.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. The parent resource name, such as
   * `projects/{project}/locations/{location}/collections/{collection_id}`.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int PAGE_SIZE_FIELD_NUMBER = 2;
  private int pageSize_ = 0;

  /**
   *
   *
   * <pre>
   * Optional. Not supported.
   * </pre>
   *
   * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The pageSize.
   */
  @java.lang.Override
  public int getPageSize() {
    return pageSize_;
  }

  public static final int PAGE_TOKEN_FIELD_NUMBER = 3;

  // Same lazy String/ByteString caching pattern as parent_.
  @SuppressWarnings("serial")
  private volatile java.lang.Object pageToken_ = "";

  /**
   *
   *
   * <pre>
   * Optional. Not supported.
   * </pre>
   *
   * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The pageToken.
   */
  @java.lang.Override
  public java.lang.String getPageToken() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      pageToken_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Optional. Not supported.
   * </pre>
   *
   * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for pageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getPageTokenBytes() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      pageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int FILTER_FIELD_NUMBER = 4;

  // Same lazy String/ByteString caching pattern as parent_.
  @SuppressWarnings("serial")
  private volatile java.lang.Object filter_ = "";

  /**
   *
   *
   * <pre>
   * Optional. Filter by solution type. For example:
   * solution_type=SOLUTION_TYPE_SEARCH
   * </pre>
   *
   * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The filter.
   */
  @java.lang.Override
  public java.lang.String getFilter() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      filter_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Optional. Filter by solution type. For example:
   * solution_type=SOLUTION_TYPE_SEARCH
   * </pre>
   *
   * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for filter.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getFilterBytes() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      filter_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // Memoized initialization state: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required-by-wire fields to validate; always initialized.
    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes this message: per proto3 semantics, only fields holding
  // non-default values are written to the output stream.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
    }
    getUnknownFields().writeTo(output);
  }

  // Computes (and memoizes in memoizedSize) the serialized byte size; must
  // mirror writeTo's emit conditions exactly.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  // Field-by-field value equality, including unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.discoveryengine.v1beta.ListEnginesRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.discoveryengine.v1beta.ListEnginesRequest other =
        (com.google.cloud.discoveryengine.v1beta.ListEnginesRequest) obj;

    if (!getParent().equals(other.getParent())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!getFilter().equals(other.getFilter())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Hash mixes the descriptor, each field number/value pair, and unknown
  // fields; memoized in memoizedHashCode (0 means "not yet computed").
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (37 * hash) + FILTER_FIELD_NUMBER;
    hash = (53 * hash) + getFilter().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // --- Standard generated parse entry points, all delegating to PARSER. ---

  public static com.google.cloud.discoveryengine.v1beta.ListEnginesRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.discoveryengine.v1beta.ListEnginesRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.discoveryengine.v1beta.ListEnginesRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.discoveryengine.v1beta.ListEnginesRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.discoveryengine.v1beta.ListEnginesRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.discoveryengine.v1beta.ListEnginesRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.discoveryengine.v1beta.ListEnginesRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.discoveryengine.v1beta.ListEnginesRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.discoveryengine.v1beta.ListEnginesRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.discoveryengine.v1beta.ListEnginesRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.discoveryengine.v1beta.ListEnginesRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.discoveryengine.v1beta.ListEnginesRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.discoveryengine.v1beta.ListEnginesRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields a fresh builder; any other instance seeds
    // the builder with its own field values.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * Request message for
   * [EngineService.ListEngines][google.cloud.discoveryengine.v1beta.EngineService.ListEngines]
   * method.
   * </pre>
   *
   * Protobuf type {@code google.cloud.discoveryengine.v1beta.ListEnginesRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1beta.ListEnginesRequest)
      com.google.cloud.discoveryengine.v1beta.ListEnginesRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.discoveryengine.v1beta.EngineServiceProto
          .internal_static_google_cloud_discoveryengine_v1beta_ListEnginesRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.discoveryengine.v1beta.EngineServiceProto
          .internal_static_google_cloud_discoveryengine_v1beta_ListEnginesRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.discoveryengine.v1beta.ListEnginesRequest.class,
              com.google.cloud.discoveryengine.v1beta.ListEnginesRequest.Builder.class);
    }

    // Construct using com.google.cloud.discoveryengine.v1beta.ListEnginesRequest.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets all fields to proto3 defaults and clears the has-bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      pageSize_ = 0;
      pageToken_ = "";
      filter_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.discoveryengine.v1beta.EngineServiceProto
          .internal_static_google_cloud_discoveryengine_v1beta_ListEnginesRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.discoveryengine.v1beta.ListEnginesRequest getDefaultInstanceForType() {
      return com.google.cloud.discoveryengine.v1beta.ListEnginesRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.discoveryengine.v1beta.ListEnginesRequest build() {
      com.google.cloud.discoveryengine.v1beta.ListEnginesRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.discoveryengine.v1beta.ListEnginesRequest buildPartial() {
      com.google.cloud.discoveryengine.v1beta.ListEnginesRequest result =
          new com.google.cloud.discoveryengine.v1beta.ListEnginesRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose has-bit is set into the result message.
    // Bit assignments: 0x1=parent, 0x2=pageSize, 0x4=pageToken, 0x8=filter.
    private void buildPartial0(com.google.cloud.discoveryengine.v1beta.ListEnginesRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.pageSize_ = pageSize_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.pageToken_ = pageToken_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.filter_ = filter_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    // Dispatches to the typed mergeFrom when possible; falls back to the
    // reflective descriptor-based merge otherwise.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.discoveryengine.v1beta.ListEnginesRequest) {
        return mergeFrom((com.google.cloud.discoveryengine.v1beta.ListEnginesRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Merges set (non-default) fields from `other` into this builder.
    public Builder mergeFrom(com.google.cloud.discoveryengine.v1beta.ListEnginesRequest other) {
      if (other == com.google.cloud.discoveryengine.v1beta.ListEnginesRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      if (!other.getFilter().isEmpty()) {
        filter_ = other.filter_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Parses fields from the wire. Tag values are (field_number << 3) | wire_type:
    // 10 = field 1 (parent, length-delimited), 16 = field 2 (page_size, varint),
    // 26 = field 3 (page_token), 34 = field 4 (filter).
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 16:
              {
                pageSize_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
            case 26:
              {
                pageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            case 34:
              {
                filter_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 34
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Has-bits for the four fields; see buildPartial0 for the bit layout.
    private int bitField0_;

    private java.lang.Object parent_ = "";

    /**
     *
     *
     * <pre>
     * Required. The parent resource name, such as
     * `projects/{project}/locations/{location}/collections/{collection_id}`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The parent resource name, such as
     * `projects/{project}/locations/{location}/collections/{collection_id}`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The parent resource name, such as
     * `projects/{project}/locations/{location}/collections/{collection_id}`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The parent resource name, such as
     * `projects/{project}/locations/{location}/collections/{collection_id}`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The parent resource name, such as
     * `projects/{project}/locations/{location}/collections/{collection_id}`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private int pageSize_;

    /**
     *
     *
     * <pre>
     * Optional. Not supported.
     * </pre>
     *
     * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The pageSize.
     */
    @java.lang.Override
    public int getPageSize() {
      return pageSize_;
    }

    /**
     *
     *
     * <pre>
     * Optional. Not supported.
     * </pre>
     *
     * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The pageSize to set.
     * @return This builder for chaining.
     */
    public Builder setPageSize(int value) {

      pageSize_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. Not supported.
     * </pre>
     *
     * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageSize() {
      bitField0_ = (bitField0_ & ~0x00000002);
      pageSize_ = 0;
      onChanged();
      return this;
    }

    private java.lang.Object pageToken_ = "";

    /**
     *
     *
     * <pre>
     * Optional. Not supported.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The pageToken.
     */
    public java.lang.String getPageToken() {
      java.lang.Object ref = pageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        pageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. Not supported.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for pageToken.
     */
    public com.google.protobuf.ByteString getPageTokenBytes() {
      java.lang.Object ref = pageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        pageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. Not supported.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      pageToken_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. Not supported.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageToken() {
      pageToken_ = getDefaultInstance().getPageToken();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. Not supported.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      pageToken_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    private java.lang.Object filter_ = "";

    /**
     *
     *
     * <pre>
     * Optional. Filter by solution type. For example:
     * solution_type=SOLUTION_TYPE_SEARCH
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The filter.
     */
    public java.lang.String getFilter() {
      java.lang.Object ref = filter_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        filter_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. Filter by solution type. For example:
     * solution_type=SOLUTION_TYPE_SEARCH
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for filter.
     */
    public com.google.protobuf.ByteString getFilterBytes() {
      java.lang.Object ref = filter_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        filter_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. Filter by solution type. For example:
     * solution_type=SOLUTION_TYPE_SEARCH
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilter(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      filter_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. Filter by solution type. For example:
     * solution_type=SOLUTION_TYPE_SEARCH
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearFilter() {
      filter_ = getDefaultInstance().getFilter();
      bitField0_ = (bitField0_ & ~0x00000008);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. Filter by solution type. For example:
     * solution_type=SOLUTION_TYPE_SEARCH
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilterBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      filter_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1beta.ListEnginesRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1beta.ListEnginesRequest)
  private static final com.google.cloud.discoveryengine.v1beta.ListEnginesRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.discoveryengine.v1beta.ListEnginesRequest();
  }

  public static com.google.cloud.discoveryengine.v1beta.ListEnginesRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Shared parser: delegates to Builder.mergeFrom and converts I/O and
  // uninitialized-message failures to InvalidProtocolBufferException, attaching
  // the partially parsed message.
  private static final com.google.protobuf.Parser<ListEnginesRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListEnginesRequest>() {
        @java.lang.Override
        public ListEnginesRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListEnginesRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListEnginesRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.discoveryengine.v1beta.ListEnginesRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 34,883 | java-datacatalog/proto-google-cloud-datacatalog-v1/src/main/java/com/google/cloud/datacatalog/v1/ListTaxonomiesRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/datacatalog/v1/policytagmanager.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.datacatalog.v1;
/**
*
*
* <pre>
* Request message for
* [ListTaxonomies][google.cloud.datacatalog.v1.PolicyTagManager.ListTaxonomies].
* </pre>
*
* Protobuf type {@code google.cloud.datacatalog.v1.ListTaxonomiesRequest}
*/
public final class ListTaxonomiesRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.datacatalog.v1.ListTaxonomiesRequest)
ListTaxonomiesRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListTaxonomiesRequest.newBuilder() to construct.
private ListTaxonomiesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListTaxonomiesRequest() {
parent_ = "";
pageToken_ = "";
filter_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListTaxonomiesRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datacatalog.v1.PolicyTagManagerProto
.internal_static_google_cloud_datacatalog_v1_ListTaxonomiesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datacatalog.v1.PolicyTagManagerProto
.internal_static_google_cloud_datacatalog_v1_ListTaxonomiesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datacatalog.v1.ListTaxonomiesRequest.class,
com.google.cloud.datacatalog.v1.ListTaxonomiesRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Resource name of the project to list the taxonomies of.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. Resource name of the project to list the taxonomies of.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 2;
private int pageSize_ = 0;
/**
*
*
* <pre>
* The maximum number of items to return. Must be a value between 1 and 1000
* inclusively. If not set, defaults to 50.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* The pagination token of the next results page. If not set,
* the first page is returned.
*
* The token is returned in the response to a previous list request.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* The pagination token of the next results page. If not set,
* the first page is returned.
*
* The token is returned in the response to a previous list request.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int FILTER_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Supported field for filter is 'service' and value is 'dataplex'.
* Eg: service=dataplex.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The filter.
*/
@java.lang.Override
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
}
}
/**
*
*
* <pre>
* Supported field for filter is 'service' and value is 'dataplex'.
* Eg: service=dataplex.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The bytes for filter.
*/
@java.lang.Override
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (pageSize_ != 0) {
output.writeInt32(2, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (pageSize_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.datacatalog.v1.ListTaxonomiesRequest)) {
return super.equals(obj);
}
com.google.cloud.datacatalog.v1.ListTaxonomiesRequest other =
(com.google.cloud.datacatalog.v1.ListTaxonomiesRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (getPageSize() != other.getPageSize()) return false;
if (!getPageToken().equals(other.getPageToken())) return false;
if (!getFilter().equals(other.getFilter())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
hash = (53 * hash) + getPageSize();
hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getPageToken().hashCode();
hash = (37 * hash) + FILTER_FIELD_NUMBER;
hash = (53 * hash) + getFilter().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.datacatalog.v1.ListTaxonomiesRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datacatalog.v1.ListTaxonomiesRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datacatalog.v1.ListTaxonomiesRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datacatalog.v1.ListTaxonomiesRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datacatalog.v1.ListTaxonomiesRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datacatalog.v1.ListTaxonomiesRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datacatalog.v1.ListTaxonomiesRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datacatalog.v1.ListTaxonomiesRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datacatalog.v1.ListTaxonomiesRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.datacatalog.v1.ListTaxonomiesRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datacatalog.v1.ListTaxonomiesRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datacatalog.v1.ListTaxonomiesRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.datacatalog.v1.ListTaxonomiesRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for
* [ListTaxonomies][google.cloud.datacatalog.v1.PolicyTagManager.ListTaxonomies].
* </pre>
*
* Protobuf type {@code google.cloud.datacatalog.v1.ListTaxonomiesRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.datacatalog.v1.ListTaxonomiesRequest)
com.google.cloud.datacatalog.v1.ListTaxonomiesRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datacatalog.v1.PolicyTagManagerProto
.internal_static_google_cloud_datacatalog_v1_ListTaxonomiesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datacatalog.v1.PolicyTagManagerProto
.internal_static_google_cloud_datacatalog_v1_ListTaxonomiesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datacatalog.v1.ListTaxonomiesRequest.class,
com.google.cloud.datacatalog.v1.ListTaxonomiesRequest.Builder.class);
}
// Construct using com.google.cloud.datacatalog.v1.ListTaxonomiesRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
pageSize_ = 0;
pageToken_ = "";
filter_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.datacatalog.v1.PolicyTagManagerProto
.internal_static_google_cloud_datacatalog_v1_ListTaxonomiesRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.datacatalog.v1.ListTaxonomiesRequest getDefaultInstanceForType() {
return com.google.cloud.datacatalog.v1.ListTaxonomiesRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.datacatalog.v1.ListTaxonomiesRequest build() {
com.google.cloud.datacatalog.v1.ListTaxonomiesRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.datacatalog.v1.ListTaxonomiesRequest buildPartial() {
com.google.cloud.datacatalog.v1.ListTaxonomiesRequest result =
new com.google.cloud.datacatalog.v1.ListTaxonomiesRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.datacatalog.v1.ListTaxonomiesRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.pageSize_ = pageSize_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.pageToken_ = pageToken_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.filter_ = filter_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.datacatalog.v1.ListTaxonomiesRequest) {
return mergeFrom((com.google.cloud.datacatalog.v1.ListTaxonomiesRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.datacatalog.v1.ListTaxonomiesRequest other) {
if (other == com.google.cloud.datacatalog.v1.ListTaxonomiesRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.getPageSize() != 0) {
setPageSize(other.getPageSize());
}
if (!other.getPageToken().isEmpty()) {
pageToken_ = other.pageToken_;
bitField0_ |= 0x00000004;
onChanged();
}
if (!other.getFilter().isEmpty()) {
filter_ = other.filter_;
bitField0_ |= 0x00000008;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 16:
{
pageSize_ = input.readInt32();
bitField0_ |= 0x00000002;
break;
} // case 16
case 26:
{
pageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
case 34:
{
filter_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000008;
break;
} // case 34
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Resource name of the project to list the taxonomies of.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Resource name of the project to list the taxonomies of.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Resource name of the project to list the taxonomies of.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Resource name of the project to list the taxonomies of.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Resource name of the project to list the taxonomies of.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private int pageSize_;
/**
*
*
* <pre>
* The maximum number of items to return. Must be a value between 1 and 1000
* inclusively. If not set, defaults to 50.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* The maximum number of items to return. Must be a value between 1 and 1000
* inclusively. If not set, defaults to 50.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The maximum number of items to return. Must be a value between 1 and 1000
* inclusively. If not set, defaults to 50.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000002);
pageSize_ = 0;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* The pagination token of the next results page. If not set,
* the first page is returned.
*
* The token is returned in the response to a previous list request.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The pagination token of the next results page. If not set,
* the first page is returned.
*
* The token is returned in the response to a previous list request.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The pagination token of the next results page. If not set,
* the first page is returned.
*
* The token is returned in the response to a previous list request.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* The pagination token of the next results page. If not set,
* the first page is returned.
*
* The token is returned in the response to a previous list request.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* The pagination token of the next results page. If not set,
* the first page is returned.
*
* The token is returned in the response to a previous list request.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
private java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Supported field for filter is 'service' and value is 'dataplex'.
* Eg: service=dataplex.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The filter.
*/
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Supported field for filter is 'service' and value is 'dataplex'.
* Eg: service=dataplex.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The bytes for filter.
*/
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Supported field for filter is 'service' and value is 'dataplex'.
* Eg: service=dataplex.
* </pre>
*
* <code>string filter = 4;</code>
*
* @param value The filter to set.
* @return This builder for chaining.
*/
public Builder setFilter(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
filter_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* Supported field for filter is 'service' and value is 'dataplex'.
* Eg: service=dataplex.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return This builder for chaining.
*/
public Builder clearFilter() {
filter_ = getDefaultInstance().getFilter();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* Supported field for filter is 'service' and value is 'dataplex'.
* Eg: service=dataplex.
* </pre>
*
* <code>string filter = 4;</code>
*
* @param value The bytes for filter to set.
* @return This builder for chaining.
*/
public Builder setFilterBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
filter_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.datacatalog.v1.ListTaxonomiesRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1.ListTaxonomiesRequest)
private static final com.google.cloud.datacatalog.v1.ListTaxonomiesRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.datacatalog.v1.ListTaxonomiesRequest();
}
public static com.google.cloud.datacatalog.v1.ListTaxonomiesRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListTaxonomiesRequest> PARSER =
new com.google.protobuf.AbstractParser<ListTaxonomiesRequest>() {
@java.lang.Override
public ListTaxonomiesRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListTaxonomiesRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListTaxonomiesRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.datacatalog.v1.ListTaxonomiesRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 34,994 | java-networkconnectivity/google-cloud-networkconnectivity/src/test/java/com/google/cloud/networkconnectivity/v1/InternalRangeServiceClientTest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.networkconnectivity.v1;
import static com.google.cloud.networkconnectivity.v1.InternalRangeServiceClient.ListInternalRangesPagedResponse;
import static com.google.cloud.networkconnectivity.v1.InternalRangeServiceClient.ListLocationsPagedResponse;
import com.google.api.gax.core.NoCredentialsProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.testing.LocalChannelProvider;
import com.google.api.gax.grpc.testing.MockGrpcService;
import com.google.api.gax.grpc.testing.MockServiceHelper;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.InvalidArgumentException;
import com.google.api.gax.rpc.StatusCode;
import com.google.cloud.location.GetLocationRequest;
import com.google.cloud.location.ListLocationsRequest;
import com.google.cloud.location.ListLocationsResponse;
import com.google.cloud.location.Location;
import com.google.common.collect.Lists;
import com.google.iam.v1.AuditConfig;
import com.google.iam.v1.Binding;
import com.google.iam.v1.GetIamPolicyRequest;
import com.google.iam.v1.GetPolicyOptions;
import com.google.iam.v1.Policy;
import com.google.iam.v1.SetIamPolicyRequest;
import com.google.iam.v1.TestIamPermissionsRequest;
import com.google.iam.v1.TestIamPermissionsResponse;
import com.google.longrunning.Operation;
import com.google.protobuf.AbstractMessage;
import com.google.protobuf.Any;
import com.google.protobuf.ByteString;
import com.google.protobuf.Empty;
import com.google.protobuf.FieldMask;
import com.google.protobuf.Timestamp;
import io.grpc.StatusRuntimeException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import javax.annotation.Generated;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
/**
 * Generated unit tests for {@code InternalRangeServiceClient}.
 *
 * <p>Every test follows the same GAPIC pattern: (1) enqueue a canned response (or exception) on
 * the relevant in-process mock service, (2) invoke the client method under test, (3) assert the
 * returned value matches the canned response, and (4) assert exactly one request reached the mock
 * with the expected fields and that the standard API-client header was sent. Methods suffixed
 * {@code Test2} exercise the plain-String overload of the same RPC; {@code ExceptionTest} variants
 * verify that a gRPC {@code INVALID_ARGUMENT} status surfaces as {@code InvalidArgumentException}
 * (unwrapped from {@code ExecutionException} for the long-running *Async calls).
 *
 * <p>NOTE(review): this file is produced by gapic-generator-java — do not hand-edit; regenerate
 * instead.
 */
@Generated("by gapic-generator-java")
public class InternalRangeServiceClientTest {
  // In-process mock backends shared by all tests in this class (started once per class).
  private static MockIAMPolicy mockIAMPolicy;
  private static MockInternalRangeService mockInternalRangeService;
  private static MockLocations mockLocations;
  private static MockServiceHelper mockServiceHelper;
  // Per-test fixtures: a local channel into the mock server and the client under test.
  private LocalChannelProvider channelProvider;
  private InternalRangeServiceClient client;

  /** Boots a single in-process gRPC server hosting all three mock services. */
  @BeforeClass
  public static void startStaticServer() {
    mockInternalRangeService = new MockInternalRangeService();
    mockLocations = new MockLocations();
    mockIAMPolicy = new MockIAMPolicy();
    mockServiceHelper =
        new MockServiceHelper(
            UUID.randomUUID().toString(),
            Arrays.<MockGrpcService>asList(mockInternalRangeService, mockLocations, mockIAMPolicy));
    mockServiceHelper.start();
  }

  /** Tears down the shared in-process server after the last test. */
  @AfterClass
  public static void stopServer() {
    mockServiceHelper.stop();
  }

  /** Resets mock state and builds a fresh client wired to the in-process channel. */
  @Before
  public void setUp() throws IOException {
    mockServiceHelper.reset();
    channelProvider = mockServiceHelper.createChannelProvider();
    InternalRangeServiceSettings settings =
        InternalRangeServiceSettings.newBuilder()
            .setTransportChannelProvider(channelProvider)
            .setCredentialsProvider(NoCredentialsProvider.create())
            .build();
    client = InternalRangeServiceClient.create(settings);
  }

  @After
  public void tearDown() throws Exception {
    client.close();
  }

  // --- listInternalRanges -------------------------------------------------

  @Test
  public void listInternalRangesTest() throws Exception {
    InternalRange responsesElement = InternalRange.newBuilder().build();
    // Empty next-page token => single-page result.
    ListInternalRangesResponse expectedResponse =
        ListInternalRangesResponse.newBuilder()
            .setNextPageToken("")
            .addAllInternalRanges(Arrays.asList(responsesElement))
            .build();
    mockInternalRangeService.addResponse(expectedResponse);
    LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
    ListInternalRangesPagedResponse pagedListResponse = client.listInternalRanges(parent);
    List<InternalRange> resources = Lists.newArrayList(pagedListResponse.iterateAll());
    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getInternalRangesList().get(0), resources.get(0));
    List<AbstractMessage> actualRequests = mockInternalRangeService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListInternalRangesRequest actualRequest = ((ListInternalRangesRequest) actualRequests.get(0));
    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void listInternalRangesExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockInternalRangeService.addException(exception);
    try {
      LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
      client.listInternalRanges(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void listInternalRangesTest2() throws Exception {
    InternalRange responsesElement = InternalRange.newBuilder().build();
    ListInternalRangesResponse expectedResponse =
        ListInternalRangesResponse.newBuilder()
            .setNextPageToken("")
            .addAllInternalRanges(Arrays.asList(responsesElement))
            .build();
    mockInternalRangeService.addResponse(expectedResponse);
    String parent = "parent-995424086";
    ListInternalRangesPagedResponse pagedListResponse = client.listInternalRanges(parent);
    List<InternalRange> resources = Lists.newArrayList(pagedListResponse.iterateAll());
    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getInternalRangesList().get(0), resources.get(0));
    List<AbstractMessage> actualRequests = mockInternalRangeService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListInternalRangesRequest actualRequest = ((ListInternalRangesRequest) actualRequests.get(0));
    Assert.assertEquals(parent, actualRequest.getParent());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void listInternalRangesExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockInternalRangeService.addException(exception);
    try {
      String parent = "parent-995424086";
      client.listInternalRanges(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // --- getInternalRange ---------------------------------------------------

  @Test
  public void getInternalRangeTest() throws Exception {
    // Fully-populated canned resource; field values are generator-chosen sentinels.
    InternalRange expectedResponse =
        InternalRange.newBuilder()
            .setName(InternalRangeName.of("[PROJECT]", "[LOCATION]", "[INTERNAL_RANGE]").toString())
            .setCreateTime(Timestamp.newBuilder().build())
            .setUpdateTime(Timestamp.newBuilder().build())
            .putAllLabels(new HashMap<String, String>())
            .setDescription("description-1724546052")
            .setIpCidrRange("ipCidrRange-866375486")
            .setNetwork("network1843485230")
            .setPrefixLength(-1157046989)
            .addAllTargetCidrRange(new ArrayList<String>())
            .addAllUsers(new ArrayList<String>())
            .addAllOverlaps(new ArrayList<InternalRange.Overlap>())
            .setMigration(InternalRange.Migration.newBuilder().build())
            .setImmutable(true)
            .setAllocationOptions(InternalRange.AllocationOptions.newBuilder().build())
            .addAllExcludeCidrRanges(new ArrayList<String>())
            .build();
    mockInternalRangeService.addResponse(expectedResponse);
    InternalRangeName name = InternalRangeName.of("[PROJECT]", "[LOCATION]", "[INTERNAL_RANGE]");
    InternalRange actualResponse = client.getInternalRange(name);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockInternalRangeService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetInternalRangeRequest actualRequest = ((GetInternalRangeRequest) actualRequests.get(0));
    Assert.assertEquals(name.toString(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void getInternalRangeExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockInternalRangeService.addException(exception);
    try {
      InternalRangeName name = InternalRangeName.of("[PROJECT]", "[LOCATION]", "[INTERNAL_RANGE]");
      client.getInternalRange(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void getInternalRangeTest2() throws Exception {
    InternalRange expectedResponse =
        InternalRange.newBuilder()
            .setName(InternalRangeName.of("[PROJECT]", "[LOCATION]", "[INTERNAL_RANGE]").toString())
            .setCreateTime(Timestamp.newBuilder().build())
            .setUpdateTime(Timestamp.newBuilder().build())
            .putAllLabels(new HashMap<String, String>())
            .setDescription("description-1724546052")
            .setIpCidrRange("ipCidrRange-866375486")
            .setNetwork("network1843485230")
            .setPrefixLength(-1157046989)
            .addAllTargetCidrRange(new ArrayList<String>())
            .addAllUsers(new ArrayList<String>())
            .addAllOverlaps(new ArrayList<InternalRange.Overlap>())
            .setMigration(InternalRange.Migration.newBuilder().build())
            .setImmutable(true)
            .setAllocationOptions(InternalRange.AllocationOptions.newBuilder().build())
            .addAllExcludeCidrRanges(new ArrayList<String>())
            .build();
    mockInternalRangeService.addResponse(expectedResponse);
    String name = "name3373707";
    InternalRange actualResponse = client.getInternalRange(name);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockInternalRangeService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetInternalRangeRequest actualRequest = ((GetInternalRangeRequest) actualRequests.get(0));
    Assert.assertEquals(name, actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void getInternalRangeExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockInternalRangeService.addException(exception);
    try {
      String name = "name3373707";
      client.getInternalRange(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // --- createInternalRange (long-running operation) -----------------------

  @Test
  public void createInternalRangeTest() throws Exception {
    InternalRange expectedResponse =
        InternalRange.newBuilder()
            .setName(InternalRangeName.of("[PROJECT]", "[LOCATION]", "[INTERNAL_RANGE]").toString())
            .setCreateTime(Timestamp.newBuilder().build())
            .setUpdateTime(Timestamp.newBuilder().build())
            .putAllLabels(new HashMap<String, String>())
            .setDescription("description-1724546052")
            .setIpCidrRange("ipCidrRange-866375486")
            .setNetwork("network1843485230")
            .setPrefixLength(-1157046989)
            .addAllTargetCidrRange(new ArrayList<String>())
            .addAllUsers(new ArrayList<String>())
            .addAllOverlaps(new ArrayList<InternalRange.Overlap>())
            .setMigration(InternalRange.Migration.newBuilder().build())
            .setImmutable(true)
            .setAllocationOptions(InternalRange.AllocationOptions.newBuilder().build())
            .addAllExcludeCidrRanges(new ArrayList<String>())
            .build();
    // LRO stub: already done, response packed into the Operation.
    Operation resultOperation =
        Operation.newBuilder()
            .setName("createInternalRangeTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockInternalRangeService.addResponse(resultOperation);
    LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
    InternalRange internalRange = InternalRange.newBuilder().build();
    String internalRangeId = "internalRangeId-392750661";
    InternalRange actualResponse =
        client.createInternalRangeAsync(parent, internalRange, internalRangeId).get();
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockInternalRangeService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    CreateInternalRangeRequest actualRequest = ((CreateInternalRangeRequest) actualRequests.get(0));
    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    Assert.assertEquals(internalRange, actualRequest.getInternalRange());
    Assert.assertEquals(internalRangeId, actualRequest.getInternalRangeId());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void createInternalRangeExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockInternalRangeService.addException(exception);
    try {
      LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
      InternalRange internalRange = InternalRange.newBuilder().build();
      String internalRangeId = "internalRangeId-392750661";
      client.createInternalRangeAsync(parent, internalRange, internalRangeId).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      // Async failure is delivered wrapped; verify the cause type and status code.
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }

  @Test
  public void createInternalRangeTest2() throws Exception {
    InternalRange expectedResponse =
        InternalRange.newBuilder()
            .setName(InternalRangeName.of("[PROJECT]", "[LOCATION]", "[INTERNAL_RANGE]").toString())
            .setCreateTime(Timestamp.newBuilder().build())
            .setUpdateTime(Timestamp.newBuilder().build())
            .putAllLabels(new HashMap<String, String>())
            .setDescription("description-1724546052")
            .setIpCidrRange("ipCidrRange-866375486")
            .setNetwork("network1843485230")
            .setPrefixLength(-1157046989)
            .addAllTargetCidrRange(new ArrayList<String>())
            .addAllUsers(new ArrayList<String>())
            .addAllOverlaps(new ArrayList<InternalRange.Overlap>())
            .setMigration(InternalRange.Migration.newBuilder().build())
            .setImmutable(true)
            .setAllocationOptions(InternalRange.AllocationOptions.newBuilder().build())
            .addAllExcludeCidrRanges(new ArrayList<String>())
            .build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("createInternalRangeTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockInternalRangeService.addResponse(resultOperation);
    String parent = "parent-995424086";
    InternalRange internalRange = InternalRange.newBuilder().build();
    String internalRangeId = "internalRangeId-392750661";
    InternalRange actualResponse =
        client.createInternalRangeAsync(parent, internalRange, internalRangeId).get();
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockInternalRangeService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    CreateInternalRangeRequest actualRequest = ((CreateInternalRangeRequest) actualRequests.get(0));
    Assert.assertEquals(parent, actualRequest.getParent());
    Assert.assertEquals(internalRange, actualRequest.getInternalRange());
    Assert.assertEquals(internalRangeId, actualRequest.getInternalRangeId());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void createInternalRangeExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockInternalRangeService.addException(exception);
    try {
      String parent = "parent-995424086";
      InternalRange internalRange = InternalRange.newBuilder().build();
      String internalRangeId = "internalRangeId-392750661";
      client.createInternalRangeAsync(parent, internalRange, internalRangeId).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }

  // --- updateInternalRange (long-running operation) -----------------------

  @Test
  public void updateInternalRangeTest() throws Exception {
    InternalRange expectedResponse =
        InternalRange.newBuilder()
            .setName(InternalRangeName.of("[PROJECT]", "[LOCATION]", "[INTERNAL_RANGE]").toString())
            .setCreateTime(Timestamp.newBuilder().build())
            .setUpdateTime(Timestamp.newBuilder().build())
            .putAllLabels(new HashMap<String, String>())
            .setDescription("description-1724546052")
            .setIpCidrRange("ipCidrRange-866375486")
            .setNetwork("network1843485230")
            .setPrefixLength(-1157046989)
            .addAllTargetCidrRange(new ArrayList<String>())
            .addAllUsers(new ArrayList<String>())
            .addAllOverlaps(new ArrayList<InternalRange.Overlap>())
            .setMigration(InternalRange.Migration.newBuilder().build())
            .setImmutable(true)
            .setAllocationOptions(InternalRange.AllocationOptions.newBuilder().build())
            .addAllExcludeCidrRanges(new ArrayList<String>())
            .build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("updateInternalRangeTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockInternalRangeService.addResponse(resultOperation);
    InternalRange internalRange = InternalRange.newBuilder().build();
    FieldMask updateMask = FieldMask.newBuilder().build();
    InternalRange actualResponse = client.updateInternalRangeAsync(internalRange, updateMask).get();
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockInternalRangeService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    UpdateInternalRangeRequest actualRequest = ((UpdateInternalRangeRequest) actualRequests.get(0));
    Assert.assertEquals(internalRange, actualRequest.getInternalRange());
    Assert.assertEquals(updateMask, actualRequest.getUpdateMask());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void updateInternalRangeExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockInternalRangeService.addException(exception);
    try {
      InternalRange internalRange = InternalRange.newBuilder().build();
      FieldMask updateMask = FieldMask.newBuilder().build();
      client.updateInternalRangeAsync(internalRange, updateMask).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }

  // --- deleteInternalRange (long-running operation) -----------------------

  @Test
  public void deleteInternalRangeTest() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("deleteInternalRangeTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockInternalRangeService.addResponse(resultOperation);
    InternalRangeName name = InternalRangeName.of("[PROJECT]", "[LOCATION]", "[INTERNAL_RANGE]");
    client.deleteInternalRangeAsync(name).get();
    List<AbstractMessage> actualRequests = mockInternalRangeService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    DeleteInternalRangeRequest actualRequest = ((DeleteInternalRangeRequest) actualRequests.get(0));
    Assert.assertEquals(name.toString(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void deleteInternalRangeExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockInternalRangeService.addException(exception);
    try {
      InternalRangeName name = InternalRangeName.of("[PROJECT]", "[LOCATION]", "[INTERNAL_RANGE]");
      client.deleteInternalRangeAsync(name).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }

  @Test
  public void deleteInternalRangeTest2() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("deleteInternalRangeTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockInternalRangeService.addResponse(resultOperation);
    String name = "name3373707";
    client.deleteInternalRangeAsync(name).get();
    List<AbstractMessage> actualRequests = mockInternalRangeService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    DeleteInternalRangeRequest actualRequest = ((DeleteInternalRangeRequest) actualRequests.get(0));
    Assert.assertEquals(name, actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void deleteInternalRangeExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockInternalRangeService.addException(exception);
    try {
      String name = "name3373707";
      client.deleteInternalRangeAsync(name).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }

  // --- Locations mixin ----------------------------------------------------

  @Test
  public void listLocationsTest() throws Exception {
    Location responsesElement = Location.newBuilder().build();
    ListLocationsResponse expectedResponse =
        ListLocationsResponse.newBuilder()
            .setNextPageToken("")
            .addAllLocations(Arrays.asList(responsesElement))
            .build();
    mockLocations.addResponse(expectedResponse);
    ListLocationsRequest request =
        ListLocationsRequest.newBuilder()
            .setName("name3373707")
            .setFilter("filter-1274492040")
            .setPageSize(883849137)
            .setPageToken("pageToken873572522")
            .build();
    ListLocationsPagedResponse pagedListResponse = client.listLocations(request);
    List<Location> resources = Lists.newArrayList(pagedListResponse.iterateAll());
    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getLocationsList().get(0), resources.get(0));
    List<AbstractMessage> actualRequests = mockLocations.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListLocationsRequest actualRequest = ((ListLocationsRequest) actualRequests.get(0));
    Assert.assertEquals(request.getName(), actualRequest.getName());
    Assert.assertEquals(request.getFilter(), actualRequest.getFilter());
    Assert.assertEquals(request.getPageSize(), actualRequest.getPageSize());
    Assert.assertEquals(request.getPageToken(), actualRequest.getPageToken());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void listLocationsExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLocations.addException(exception);
    try {
      ListLocationsRequest request =
          ListLocationsRequest.newBuilder()
              .setName("name3373707")
              .setFilter("filter-1274492040")
              .setPageSize(883849137)
              .setPageToken("pageToken873572522")
              .build();
      client.listLocations(request);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void getLocationTest() throws Exception {
    Location expectedResponse =
        Location.newBuilder()
            .setName("name3373707")
            .setLocationId("locationId1541836720")
            .setDisplayName("displayName1714148973")
            .putAllLabels(new HashMap<String, String>())
            .setMetadata(Any.newBuilder().build())
            .build();
    mockLocations.addResponse(expectedResponse);
    GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build();
    Location actualResponse = client.getLocation(request);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockLocations.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetLocationRequest actualRequest = ((GetLocationRequest) actualRequests.get(0));
    Assert.assertEquals(request.getName(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void getLocationExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLocations.addException(exception);
    try {
      GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build();
      client.getLocation(request);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // --- IAM policy mixin ---------------------------------------------------

  @Test
  public void setIamPolicyTest() throws Exception {
    Policy expectedResponse =
        Policy.newBuilder()
            .setVersion(351608024)
            .addAllBindings(new ArrayList<Binding>())
            .addAllAuditConfigs(new ArrayList<AuditConfig>())
            .setEtag(ByteString.EMPTY)
            .build();
    mockIAMPolicy.addResponse(expectedResponse);
    SetIamPolicyRequest request =
        SetIamPolicyRequest.newBuilder()
            .setResource(GroupName.of("[PROJECT]", "[HUB]", "[GROUP]").toString())
            .setPolicy(Policy.newBuilder().build())
            .setUpdateMask(FieldMask.newBuilder().build())
            .build();
    Policy actualResponse = client.setIamPolicy(request);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockIAMPolicy.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    SetIamPolicyRequest actualRequest = ((SetIamPolicyRequest) actualRequests.get(0));
    Assert.assertEquals(request.getResource(), actualRequest.getResource());
    Assert.assertEquals(request.getPolicy(), actualRequest.getPolicy());
    Assert.assertEquals(request.getUpdateMask(), actualRequest.getUpdateMask());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void setIamPolicyExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockIAMPolicy.addException(exception);
    try {
      SetIamPolicyRequest request =
          SetIamPolicyRequest.newBuilder()
              .setResource(GroupName.of("[PROJECT]", "[HUB]", "[GROUP]").toString())
              .setPolicy(Policy.newBuilder().build())
              .setUpdateMask(FieldMask.newBuilder().build())
              .build();
      client.setIamPolicy(request);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void getIamPolicyTest() throws Exception {
    Policy expectedResponse =
        Policy.newBuilder()
            .setVersion(351608024)
            .addAllBindings(new ArrayList<Binding>())
            .addAllAuditConfigs(new ArrayList<AuditConfig>())
            .setEtag(ByteString.EMPTY)
            .build();
    mockIAMPolicy.addResponse(expectedResponse);
    GetIamPolicyRequest request =
        GetIamPolicyRequest.newBuilder()
            .setResource(GroupName.of("[PROJECT]", "[HUB]", "[GROUP]").toString())
            .setOptions(GetPolicyOptions.newBuilder().build())
            .build();
    Policy actualResponse = client.getIamPolicy(request);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockIAMPolicy.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetIamPolicyRequest actualRequest = ((GetIamPolicyRequest) actualRequests.get(0));
    Assert.assertEquals(request.getResource(), actualRequest.getResource());
    Assert.assertEquals(request.getOptions(), actualRequest.getOptions());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void getIamPolicyExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockIAMPolicy.addException(exception);
    try {
      GetIamPolicyRequest request =
          GetIamPolicyRequest.newBuilder()
              .setResource(GroupName.of("[PROJECT]", "[HUB]", "[GROUP]").toString())
              .setOptions(GetPolicyOptions.newBuilder().build())
              .build();
      client.getIamPolicy(request);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void testIamPermissionsTest() throws Exception {
    TestIamPermissionsResponse expectedResponse =
        TestIamPermissionsResponse.newBuilder().addAllPermissions(new ArrayList<String>()).build();
    mockIAMPolicy.addResponse(expectedResponse);
    TestIamPermissionsRequest request =
        TestIamPermissionsRequest.newBuilder()
            .setResource(GroupName.of("[PROJECT]", "[HUB]", "[GROUP]").toString())
            .addAllPermissions(new ArrayList<String>())
            .build();
    TestIamPermissionsResponse actualResponse = client.testIamPermissions(request);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockIAMPolicy.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    TestIamPermissionsRequest actualRequest = ((TestIamPermissionsRequest) actualRequests.get(0));
    Assert.assertEquals(request.getResource(), actualRequest.getResource());
    Assert.assertEquals(request.getPermissionsList(), actualRequest.getPermissionsList());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void testIamPermissionsExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockIAMPolicy.addException(exception);
    try {
      TestIamPermissionsRequest request =
          TestIamPermissionsRequest.newBuilder()
              .setResource(GroupName.of("[PROJECT]", "[HUB]", "[GROUP]").toString())
              .addAllPermissions(new ArrayList<String>())
              .build();
      client.testIamPermissions(request);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
}
|
apache/rocketmq | 35,251 | client/src/main/java/org/apache/rocketmq/client/impl/consumer/RebalanceImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.rocketmq.client.impl.consumer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.apache.rocketmq.client.consumer.AllocateMessageQueueStrategy;
import org.apache.rocketmq.client.exception.MQClientException;
import org.apache.rocketmq.client.impl.FindBrokerResult;
import org.apache.rocketmq.client.impl.factory.MQClientInstance;
import org.apache.rocketmq.common.KeyBuilder;
import org.apache.rocketmq.common.MixAll;
import org.apache.rocketmq.common.message.MessageQueue;
import org.apache.rocketmq.common.message.MessageQueueAssignment;
import org.apache.rocketmq.common.message.MessageRequestMode;
import org.apache.rocketmq.remoting.protocol.body.LockBatchRequestBody;
import org.apache.rocketmq.remoting.protocol.body.UnlockBatchRequestBody;
import org.apache.rocketmq.remoting.protocol.filter.FilterAPI;
import org.apache.rocketmq.remoting.protocol.heartbeat.ConsumeType;
import org.apache.rocketmq.remoting.protocol.heartbeat.MessageModel;
import org.apache.rocketmq.remoting.protocol.heartbeat.SubscriptionData;
import org.apache.rocketmq.logging.org.slf4j.Logger;
import org.apache.rocketmq.logging.org.slf4j.LoggerFactory;
public abstract class RebalanceImpl {
protected static final Logger log = LoggerFactory.getLogger(RebalanceImpl.class);
protected final ConcurrentMap<MessageQueue, ProcessQueue> processQueueTable = new ConcurrentHashMap<>(64);
protected final ConcurrentMap<MessageQueue, PopProcessQueue> popProcessQueueTable = new ConcurrentHashMap<>(64);
protected final ConcurrentMap<String/* topic */, Set<MessageQueue>> topicSubscribeInfoTable =
new ConcurrentHashMap<>();
protected final ConcurrentMap<String /* topic */, SubscriptionData> subscriptionInner =
new ConcurrentHashMap<>();
protected String consumerGroup;
protected MessageModel messageModel;
protected AllocateMessageQueueStrategy allocateMessageQueueStrategy;
protected MQClientInstance mQClientFactory;
private static final int QUERY_ASSIGNMENT_TIMEOUT = 3000;
/**
 * Builds a rebalance engine for one consumer instance.
 *
 * @param consumerGroup                group this consumer belongs to
 * @param messageModel                 CLUSTERING or BROADCASTING consumption model
 * @param allocateMessageQueueStrategy policy used to divide queues among group members
 * @param mQClientFactory              client instance used for all broker communication
 */
public RebalanceImpl(String consumerGroup, MessageModel messageModel,
    AllocateMessageQueueStrategy allocateMessageQueueStrategy,
    MQClientInstance mQClientFactory) {
    // Plain field initialization; validation is deferred to the consumer start-up checks.
    this.mQClientFactory = mQClientFactory;
    this.allocateMessageQueueStrategy = allocateMessageQueueStrategy;
    this.messageModel = messageModel;
    this.consumerGroup = consumerGroup;
}
/**
 * Releases the distributed lock for a single message queue on its master broker.
 *
 * <p>Does nothing when the broker address cannot be resolved. Network failures are
 * logged and swallowed (best effort), matching the batch variant {@code unlockAll}.
 *
 * @param mq     queue whose broker-side lock should be released
 * @param oneway if {@code true}, fire-and-forget without waiting for the broker's reply
 */
public void unlock(final MessageQueue mq, final boolean oneway) {
    final String brokerName = this.mQClientFactory.getBrokerNameFromMessageQueue(mq);
    final FindBrokerResult findBrokerResult =
        this.mQClientFactory.findBrokerAddressInSubscribe(brokerName, MixAll.MASTER_ID, true);
    if (findBrokerResult == null) {
        // Broker unknown to this client; nothing to unlock.
        return;
    }
    final UnlockBatchRequestBody requestBody = new UnlockBatchRequestBody();
    requestBody.setConsumerGroup(this.consumerGroup);
    requestBody.setClientId(this.mQClientFactory.getClientId());
    requestBody.getMqSet().add(mq);
    try {
        this.mQClientFactory.getMQClientAPIImpl()
            .unlockBatchMQ(findBrokerResult.getBrokerAddr(), requestBody, 1000, oneway);
        // NOTE(review): logged at WARN in the original; kept as-is to preserve behavior,
        // though INFO would arguably fit a routine unlock.
        log.warn("unlock messageQueue. group:{}, clientId:{}, mq:{}",
            this.consumerGroup,
            this.mQClientFactory.getClientId(),
            mq);
    } catch (Exception e) {
        log.error("unlockBatchMQ exception, " + mq, e);
    }
}
/**
 * Releases the distributed locks for every queue in {@link #processQueueTable},
 * batched per broker, and marks the corresponding {@link ProcessQueue}s unlocked.
 *
 * <p>Brokers that cannot be resolved are skipped; per-broker RPC failures are logged
 * and do not stop the remaining brokers from being processed.
 *
 * @param oneway if {@code true}, send each unlock request without waiting for a reply
 */
public void unlockAll(final boolean oneway) {
    final HashMap<String, Set<MessageQueue>> brokerMqs = this.buildProcessQueueTableByBrokerName();
    for (final Entry<String, Set<MessageQueue>> entry : brokerMqs.entrySet()) {
        final String brokerName = entry.getKey();
        final Set<MessageQueue> mqs = entry.getValue();
        if (mqs.isEmpty()) {
            continue;
        }
        final FindBrokerResult findBrokerResult =
            this.mQClientFactory.findBrokerAddressInSubscribe(brokerName, MixAll.MASTER_ID, true);
        if (findBrokerResult == null) {
            // Broker address unknown; leave these queues locked on the broker side.
            continue;
        }
        final UnlockBatchRequestBody requestBody = new UnlockBatchRequestBody();
        requestBody.setConsumerGroup(this.consumerGroup);
        requestBody.setClientId(this.mQClientFactory.getClientId());
        requestBody.setMqSet(mqs);
        try {
            this.mQClientFactory.getMQClientAPIImpl()
                .unlockBatchMQ(findBrokerResult.getBrokerAddr(), requestBody, 1000, oneway);
            // Reflect the released broker-side locks in the local process queues.
            for (final MessageQueue mq : mqs) {
                final ProcessQueue processQueue = this.processQueueTable.get(mq);
                if (processQueue != null) {
                    processQueue.setLocked(false);
                    log.info("the message queue unlock OK, Group: {} {}", this.consumerGroup, mq);
                }
            }
        } catch (Exception e) {
            log.error("unlockBatchMQ exception, " + mqs, e);
        }
    }
}
/**
 * Groups all non-dropped message queues of this consumer by the broker that owns
 * them. Used by lockAll()/unlockAll() to send one batched request per broker.
 *
 * @return map of brokerName to the set of this client's queues on that broker
 */
private HashMap<String/* brokerName */, Set<MessageQueue>> buildProcessQueueTableByBrokerName() {
    HashMap<String, Set<MessageQueue>> result = new HashMap<>();
    for (Map.Entry<MessageQueue, ProcessQueue> entry : this.processQueueTable.entrySet()) {
        MessageQueue mq = entry.getKey();
        ProcessQueue pq = entry.getValue();
        if (pq.isDropped()) {
            continue;
        }
        // Resolve the broker name via the client factory and use the SAME key for both
        // lookup and insertion. The original code looked up with destBrokerName but
        // inserted with mq.getBrokerName(); whenever those two names differ the queues
        // of one broker would be split across two map entries, and earlier entries
        // under destBrokerName would be shadowed.
        String destBrokerName = this.mQClientFactory.getBrokerNameFromMessageQueue(mq);
        result.computeIfAbsent(destBrokerName, k -> new HashSet<>()).add(mq);
    }
    return result;
}
public boolean lock(final MessageQueue mq) {
    // Acquire the broker-side distributed lock for a single queue. Returns true only
    // when the broker confirms the queue is locked by this client.
    final FindBrokerResult brokerResult = this.mQClientFactory.findBrokerAddressInSubscribe(
        this.mQClientFactory.getBrokerNameFromMessageQueue(mq), MixAll.MASTER_ID, true);
    if (brokerResult == null) {
        return false;
    }
    final LockBatchRequestBody body = new LockBatchRequestBody();
    body.setConsumerGroup(this.consumerGroup);
    body.setClientId(this.mQClientFactory.getClientId());
    body.getMqSet().add(mq);
    try {
        final Set<MessageQueue> lockedSet = this.mQClientFactory.getMQClientAPIImpl()
            .lockBatchMQ(brokerResult.getBrokerAddr(), body, 1000);
        // Record the lock locally for every queue the broker confirmed (normally just mq).
        for (MessageQueue locked : lockedSet) {
            final ProcessQueue pq = this.processQueueTable.get(locked);
            if (pq != null) {
                pq.setLocked(true);
                pq.setLastLockTimestamp(System.currentTimeMillis());
            }
        }
        final boolean lockOK = lockedSet.contains(mq);
        log.info("message queue lock {}, {} {}", lockOK ? "OK" : "Failed", this.consumerGroup, mq);
        return lockOK;
    } catch (Exception e) {
        log.error("lockBatchMQ exception, " + mq, e);
    }
    return false;
}
public void lockAll() {
    // (Re)acquire the distributed lock for every queue this client owns, one batched
    // request per broker, and sync the results onto the local process queues.
    final HashMap<String, Set<MessageQueue>> brokerMqs = this.buildProcessQueueTableByBrokerName();
    for (final Map.Entry<String, Set<MessageQueue>> entry : brokerMqs.entrySet()) {
        final String brokerName = entry.getKey();
        final Set<MessageQueue> mqs = entry.getValue();
        if (mqs.isEmpty()) {
            continue;
        }
        final FindBrokerResult brokerResult =
            this.mQClientFactory.findBrokerAddressInSubscribe(brokerName, MixAll.MASTER_ID, true);
        if (brokerResult == null) {
            continue;
        }
        final LockBatchRequestBody body = new LockBatchRequestBody();
        body.setConsumerGroup(this.consumerGroup);
        body.setClientId(this.mQClientFactory.getClientId());
        body.setMqSet(mqs);
        try {
            final Set<MessageQueue> lockOKMQSet = this.mQClientFactory.getMQClientAPIImpl()
                .lockBatchMQ(brokerResult.getBrokerAddr(), body, 1000);
            for (MessageQueue mq : mqs) {
                final ProcessQueue pq = this.processQueueTable.get(mq);
                if (pq == null) {
                    continue;
                }
                if (lockOKMQSet.contains(mq)) {
                    // Log only on the transition from unlocked to locked.
                    if (!pq.isLocked()) {
                        log.info("the message queue locked OK, Group: {} {}", this.consumerGroup, mq);
                    }
                    pq.setLocked(true);
                    pq.setLastLockTimestamp(System.currentTimeMillis());
                } else {
                    pq.setLocked(false);
                    log.warn("the message queue locked Failed, Group: {} {}", this.consumerGroup, mq);
                }
            }
        } catch (Exception e) {
            log.error("lockBatchMQ exception, " + mqs, e);
        }
    }
}
/**
 * Whether queue assignment for {@code topic} should be computed on the client side.
 * Defaults to true; a subclass may return false to delegate the assignment to the
 * broker (see getRebalanceResultFromBroker, selected in doRebalance).
 */
public boolean clientRebalance(String topic) {
return true;
}
/**
 * Rebalances every subscribed topic, then drops queues whose topics are no longer
 * subscribed. Returns false if any topic failed to reach a balanced state.
 */
public boolean doRebalance(final boolean isOrder) {
    boolean balanced = true;
    final Map<String, SubscriptionData> subTable = this.getSubscriptionInner();
    if (subTable != null) {
        for (final String topic : subTable.keySet()) {
            try {
                // Either the client computes the assignment itself or it asks the broker.
                final boolean result = clientRebalance(topic)
                    ? this.rebalanceByTopic(topic, isOrder)
                    : this.getRebalanceResultFromBroker(topic, isOrder);
                balanced = balanced && result;
            } catch (Throwable e) {
                // Failures on retry topics are expected noise and do not affect the result.
                if (!topic.startsWith(MixAll.RETRY_GROUP_TOPIC_PREFIX)) {
                    log.warn("rebalance Exception", e);
                    balanced = false;
                }
            }
        }
    }
    this.truncateMessageQueueNotMyTopic();
    return balanced;
}
/** Returns the live subscription table (topic to subscription data); not a copy. */
public ConcurrentMap<String, SubscriptionData> getSubscriptionInner() {
return subscriptionInner;
}
/**
 * Client-side rebalance for one topic.
 *
 * BROADCASTING: every client consumes all queues, so the full queue set is adopted.
 * CLUSTERING: the queues are divided among all live consumer ids of the group using
 * the configured AllocateMessageQueueStrategy; both inputs are sorted first so every
 * client computes the same deterministic assignment.
 *
 * @return true when the locally working queue set equals the computed assignment
 */
private boolean rebalanceByTopic(final String topic, final boolean isOrder) {
boolean balanced = true;
switch (messageModel) {
case BROADCASTING: {
Set<MessageQueue> mqSet = this.topicSubscribeInfoTable.get(topic);
if (mqSet != null) {
boolean changed = this.updateProcessQueueTableInRebalance(topic, mqSet, false);
if (changed) {
this.messageQueueChanged(topic, mqSet, mqSet);
log.info("messageQueueChanged {} {} {} {}", consumerGroup, topic, mqSet, mqSet);
}
balanced = mqSet.equals(getWorkingMessageQueue(topic));
} else {
this.messageQueueChanged(topic, Collections.<MessageQueue>emptySet(), Collections.<MessageQueue>emptySet());
log.warn("doRebalance, {}, but the topic[{}] not exist.", consumerGroup, topic);
}
break;
}
case CLUSTERING: {
Set<MessageQueue> mqSet = this.topicSubscribeInfoTable.get(topic);
List<String> cidAll = this.mQClientFactory.findConsumerIdList(topic, consumerGroup);
if (null == mqSet) {
// Missing route data for a retry topic is expected and not worth a warning.
if (!topic.startsWith(MixAll.RETRY_GROUP_TOPIC_PREFIX)) {
this.messageQueueChanged(topic, Collections.<MessageQueue>emptySet(), Collections.<MessageQueue>emptySet());
log.warn("doRebalance, {}, but the topic[{}] not exist.", consumerGroup, topic);
}
}
if (null == cidAll) {
log.warn("doRebalance, {} {}, get consumer id list failed", consumerGroup, topic);
}
if (mqSet != null && cidAll != null) {
List<MessageQueue> mqAll = new ArrayList<>();
mqAll.addAll(mqSet);
// Sorting both lists makes the allocation deterministic across all clients.
Collections.sort(mqAll);
Collections.sort(cidAll);
AllocateMessageQueueStrategy strategy = this.allocateMessageQueueStrategy;
List<MessageQueue> allocateResult = null;
try {
allocateResult = strategy.allocate(
this.consumerGroup,
this.mQClientFactory.getClientId(),
mqAll,
cidAll);
} catch (Throwable e) {
log.error("allocate message queue exception. strategy name: {}, ex: {}", strategy.getName(), e);
return false;
}
Set<MessageQueue> allocateResultSet = new HashSet<>();
if (allocateResult != null) {
allocateResultSet.addAll(allocateResult);
}
boolean changed = this.updateProcessQueueTableInRebalance(topic, allocateResultSet, isOrder);
if (changed) {
log.info(
"client rebalanced result changed. allocateMessageQueueStrategyName={}, group={}, topic={}, clientId={}, mqAllSize={}, cidAllSize={}, rebalanceResultSize={}, rebalanceResultSet={}",
strategy.getName(), consumerGroup, topic, this.mQClientFactory.getClientId(), mqSet.size(), cidAll.size(),
allocateResultSet.size(), allocateResultSet);
this.messageQueueChanged(topic, mqSet, allocateResultSet);
}
balanced = allocateResultSet.equals(getWorkingMessageQueue(topic));
}
break;
}
default:
break;
}
return balanced;
}
/**
 * Broker-side rebalance for one topic: asks the broker (queryAssignment) for this
 * client's queue assignment and applies it locally.
 *
 * @return true when the locally working queue set equals the broker's assignment
 */
private boolean getRebalanceResultFromBroker(final String topic, final boolean isOrder) {
String strategyName = this.allocateMessageQueueStrategy.getName();
Set<MessageQueueAssignment> messageQueueAssignments;
try {
messageQueueAssignments = this.mQClientFactory.queryAssignment(topic, consumerGroup,
strategyName, messageModel, QUERY_ASSIGNMENT_TIMEOUT);
} catch (Exception e) {
log.error("allocate message queue exception. strategy name: {}, ex: {}", strategyName, e);
return false;
}
// null means invalid result, we should skip the update logic
if (messageQueueAssignments == null) {
return false;
}
Set<MessageQueue> mqSet = new HashSet<>();
for (MessageQueueAssignment messageQueueAssignment : messageQueueAssignments) {
if (messageQueueAssignment.getMessageQueue() != null) {
mqSet.add(messageQueueAssignment.getMessageQueue());
}
}
// mqAll is intentionally null here: when the broker computes the assignment the
// client does not know the topic's full queue set, only its own share.
Set<MessageQueue> mqAll = null;
boolean changed = this.updateMessageQueueAssignment(topic, messageQueueAssignments, isOrder);
if (changed) {
log.info("broker rebalanced result changed. allocateMessageQueueStrategyName={}, group={}, topic={}, clientId={}, assignmentSet={}",
strategyName, consumerGroup, topic, this.mQClientFactory.getClientId(), messageQueueAssignments);
this.messageQueueChanged(topic, mqAll, mqSet);
}
return mqSet.equals(getWorkingMessageQueue(topic));
}
/**
 * Collects the queues of {@code topic} that currently have a live (non-dropped)
 * process queue, across both the pull/push table and the pop table.
 */
private Set<MessageQueue> getWorkingMessageQueue(String topic) {
    final Set<MessageQueue> working = new HashSet<>();
    this.processQueueTable.forEach((mq, pq) -> {
        if (mq.getTopic().equals(topic) && !pq.isDropped()) {
            working.add(mq);
        }
    });
    this.popProcessQueueTable.forEach((mq, pq) -> {
        if (mq.getTopic().equals(topic) && !pq.isDropped()) {
            working.add(mq);
        }
    });
    return working;
}
/**
 * Drops and removes every queue whose topic is no longer present in the
 * subscription table, in both the pull/push table and the pop table.
 */
private void truncateMessageQueueNotMyTopic() {
    final Map<String, SubscriptionData> subTable = this.getSubscriptionInner();
    for (MessageQueue mq : this.processQueueTable.keySet()) {
        if (subTable.containsKey(mq.getTopic())) {
            continue;
        }
        final ProcessQueue pq = this.processQueueTable.remove(mq);
        if (pq != null) {
            // Dropping stops any in-flight consumption on the queue.
            pq.setDropped(true);
            log.info("doRebalance, {}, truncateMessageQueueNotMyTopic remove unnecessary mq, {}", consumerGroup, mq);
        }
    }
    for (MessageQueue mq : this.popProcessQueueTable.keySet()) {
        if (subTable.containsKey(mq.getTopic())) {
            continue;
        }
        final PopProcessQueue pq = this.popProcessQueueTable.remove(mq);
        if (pq != null) {
            pq.setDropped(true);
            log.info("doRebalance, {}, truncateMessageQueueNotMyTopic remove unnecessary pop mq, {}", consumerGroup, mq);
        }
    }
}
/**
 * Reconciles the local process-queue table with the freshly computed assignment
 * {@code mqSet} for one topic: drops queues no longer assigned (or whose pull has
 * expired for passive consumers), then adopts newly assigned queues, computing a
 * start offset and creating a PullRequest for each.
 *
 * @param topic topic being rebalanced
 * @param mqSet the queues now assigned to this client for the topic
 * @param needLockMq true for orderly consumption: a queue is only adopted after its
 * distributed lock is acquired
 * @return true if the table changed (queues added or removed)
 */
private boolean updateProcessQueueTableInRebalance(final String topic, final Set<MessageQueue> mqSet,
final boolean needLockMq) {
boolean changed = false;
// drop process queues no longer belong me
HashMap<MessageQueue, ProcessQueue> removeQueueMap = new HashMap<>(this.processQueueTable.size());
Iterator<Entry<MessageQueue, ProcessQueue>> it = this.processQueueTable.entrySet().iterator();
while (it.hasNext()) {
Entry<MessageQueue, ProcessQueue> next = it.next();
MessageQueue mq = next.getKey();
ProcessQueue pq = next.getValue();
if (mq.getTopic().equals(topic)) {
if (!mqSet.contains(mq)) {
// No longer assigned to us: mark dropped first so consumption stops.
pq.setDropped(true);
removeQueueMap.put(mq, pq);
} else if (pq.isPullExpired() && this.consumeType() == ConsumeType.CONSUME_PASSIVELY) {
// Still assigned, but pulling stalled: drop it so it gets re-adopted cleanly.
pq.setDropped(true);
removeQueueMap.put(mq, pq);
log.error("[BUG]doRebalance, {}, try remove unnecessary mq, {}, because pull is pause, so try to fixed it",
consumerGroup, mq);
}
}
}
// remove message queues no longer belong me
for (Entry<MessageQueue, ProcessQueue> entry : removeQueueMap.entrySet()) {
MessageQueue mq = entry.getKey();
ProcessQueue pq = entry.getValue();
// removeUnnecessaryMessageQueue persists/cleans offsets; only then remove the entry.
if (this.removeUnnecessaryMessageQueue(mq, pq)) {
this.processQueueTable.remove(mq);
changed = true;
log.info("doRebalance, {}, remove unnecessary mq, {}", consumerGroup, mq);
}
}
// add new message queue
boolean allMQLocked = true;
List<PullRequest> pullRequestList = new ArrayList<>();
for (MessageQueue mq : mqSet) {
if (!this.processQueueTable.containsKey(mq)) {
if (needLockMq && !this.lock(mq)) {
// Orderly mode: skip queues we failed to lock; retry via rebalanceLater below.
log.warn("doRebalance, {}, add a new mq failed, {}, because lock failed", consumerGroup, mq);
allMQLocked = false;
continue;
}
this.removeDirtyOffset(mq);
ProcessQueue pq = createProcessQueue();
pq.setLocked(true);
long nextOffset = this.computePullFromWhere(mq);
if (nextOffset >= 0) {
// putIfAbsent guards against a concurrent rebalance adopting the same queue.
ProcessQueue pre = this.processQueueTable.putIfAbsent(mq, pq);
if (pre != null) {
log.info("doRebalance, {}, mq already exists, {}", consumerGroup, mq);
} else {
log.info("doRebalance, {}, add a new mq, {}", consumerGroup, mq);
PullRequest pullRequest = new PullRequest();
pullRequest.setConsumerGroup(consumerGroup);
pullRequest.setNextOffset(nextOffset);
pullRequest.setMessageQueue(mq);
pullRequest.setProcessQueue(pq);
pullRequestList.add(pullRequest);
changed = true;
}
} else {
// Negative offset means the start position could not be computed.
log.warn("doRebalance, {}, add new mq failed, {}", consumerGroup, mq);
}
}
}
if (!allMQLocked) {
// Retry the whole rebalance shortly so locked-out queues can be adopted later.
mQClientFactory.rebalanceLater(500);
}
this.dispatchPullRequest(pullRequestList, 500);
return changed;
}
/**
 * Applies a broker-computed assignment for one topic. Assignments are split by
 * request mode (POP vs push/pull) and each mode's table is reconciled separately:
 * stale queues are dropped and removed, newly assigned ones are adopted and turned
 * into PullRequests / PopRequests.
 *
 * @param topic topic being rebalanced
 * @param assignments broker-provided assignments (may mix POP and push modes)
 * @param isOrder true for orderly consumption: a push queue is only adopted after
 * its distributed lock is acquired
 * @return true if either table changed (queues added or removed)
 */
private boolean updateMessageQueueAssignment(final String topic, final Set<MessageQueueAssignment> assignments,
final boolean isOrder) {
boolean changed = false;
// Split the assignment by request mode.
Map<MessageQueue, MessageQueueAssignment> mq2PushAssignment = new HashMap<>();
Map<MessageQueue, MessageQueueAssignment> mq2PopAssignment = new HashMap<>();
for (MessageQueueAssignment assignment : assignments) {
MessageQueue messageQueue = assignment.getMessageQueue();
if (messageQueue == null) {
continue;
}
if (MessageRequestMode.POP == assignment.getMode()) {
mq2PopAssignment.put(messageQueue, assignment);
} else {
mq2PushAssignment.put(messageQueue, assignment);
}
}
// Keep the pop-retry-topic subscription in sync when the whole topic switches mode.
if (!topic.startsWith(MixAll.RETRY_GROUP_TOPIC_PREFIX)) {
if (mq2PopAssignment.isEmpty() && !mq2PushAssignment.isEmpty()) {
//pop switch to push
//subscribe pop retry topic
try {
final String retryTopic = KeyBuilder.buildPopRetryTopic(topic, getConsumerGroup());
SubscriptionData subscriptionData = FilterAPI.buildSubscriptionData(retryTopic, SubscriptionData.SUB_ALL);
getSubscriptionInner().put(retryTopic, subscriptionData);
} catch (Exception ignored) {
}
} else if (!mq2PopAssignment.isEmpty() && mq2PushAssignment.isEmpty()) {
//push switch to pop
//unsubscribe pop retry topic
try {
final String retryTopic = KeyBuilder.buildPopRetryTopic(topic, getConsumerGroup());
getSubscriptionInner().remove(retryTopic);
} catch (Exception ignored) {
}
}
}
{
// drop process queues no longer belong me
HashMap<MessageQueue, ProcessQueue> removeQueueMap = new HashMap<>(this.processQueueTable.size());
Iterator<Entry<MessageQueue, ProcessQueue>> it = this.processQueueTable.entrySet().iterator();
while (it.hasNext()) {
Entry<MessageQueue, ProcessQueue> next = it.next();
MessageQueue mq = next.getKey();
ProcessQueue pq = next.getValue();
if (mq.getTopic().equals(topic)) {
if (!mq2PushAssignment.containsKey(mq)) {
// Mark dropped first so in-flight consumption stops before removal.
pq.setDropped(true);
removeQueueMap.put(mq, pq);
} else if (pq.isPullExpired() && this.consumeType() == ConsumeType.CONSUME_PASSIVELY) {
pq.setDropped(true);
removeQueueMap.put(mq, pq);
log.error("[BUG]doRebalance, {}, try remove unnecessary mq, {}, because pull is pause, so try to fixed it",
consumerGroup, mq);
}
}
}
// remove message queues no longer belong me
for (Entry<MessageQueue, ProcessQueue> entry : removeQueueMap.entrySet()) {
MessageQueue mq = entry.getKey();
ProcessQueue pq = entry.getValue();
if (this.removeUnnecessaryMessageQueue(mq, pq)) {
this.processQueueTable.remove(mq);
changed = true;
log.info("doRebalance, {}, remove unnecessary mq, {}", consumerGroup, mq);
}
}
}
{
// Same reconciliation for the pop-mode table.
HashMap<MessageQueue, PopProcessQueue> removeQueueMap = new HashMap<>(this.popProcessQueueTable.size());
Iterator<Entry<MessageQueue, PopProcessQueue>> it = this.popProcessQueueTable.entrySet().iterator();
while (it.hasNext()) {
Entry<MessageQueue, PopProcessQueue> next = it.next();
MessageQueue mq = next.getKey();
PopProcessQueue pq = next.getValue();
if (mq.getTopic().equals(topic)) {
if (!mq2PopAssignment.containsKey(mq)) {
//the queue is no longer your assignment
pq.setDropped(true);
removeQueueMap.put(mq, pq);
} else if (pq.isPullExpired() && this.consumeType() == ConsumeType.CONSUME_PASSIVELY) {
pq.setDropped(true);
removeQueueMap.put(mq, pq);
log.error("[BUG]doRebalance, {}, try remove unnecessary pop mq, {}, because pop is pause, so try to fixed it",
consumerGroup, mq);
}
}
}
// remove message queues no longer belong me
for (Entry<MessageQueue, PopProcessQueue> entry : removeQueueMap.entrySet()) {
MessageQueue mq = entry.getKey();
PopProcessQueue pq = entry.getValue();
if (this.removeUnnecessaryPopMessageQueue(mq, pq)) {
this.popProcessQueueTable.remove(mq);
changed = true;
log.info("doRebalance, {}, remove unnecessary pop mq, {}", consumerGroup, mq);
}
}
}
{
// add new message queue
boolean allMQLocked = true;
List<PullRequest> pullRequestList = new ArrayList<>();
for (MessageQueue mq : mq2PushAssignment.keySet()) {
if (!this.processQueueTable.containsKey(mq)) {
if (isOrder && !this.lock(mq)) {
// Orderly mode: only adopt a queue once its distributed lock is held.
log.warn("doRebalance, {}, add a new mq failed, {}, because lock failed", consumerGroup, mq);
allMQLocked = false;
continue;
}
this.removeDirtyOffset(mq);
ProcessQueue pq = createProcessQueue();
pq.setLocked(true);
long nextOffset = -1L;
try {
nextOffset = this.computePullFromWhereWithException(mq);
} catch (Exception e) {
log.info("doRebalance, {}, compute offset failed, {}", consumerGroup, mq);
continue;
}
if (nextOffset >= 0) {
// putIfAbsent guards against a concurrent rebalance adopting the same queue.
ProcessQueue pre = this.processQueueTable.putIfAbsent(mq, pq);
if (pre != null) {
log.info("doRebalance, {}, mq already exists, {}", consumerGroup, mq);
} else {
log.info("doRebalance, {}, add a new mq, {}", consumerGroup, mq);
PullRequest pullRequest = new PullRequest();
pullRequest.setConsumerGroup(consumerGroup);
pullRequest.setNextOffset(nextOffset);
pullRequest.setMessageQueue(mq);
pullRequest.setProcessQueue(pq);
pullRequestList.add(pullRequest);
changed = true;
}
} else {
log.warn("doRebalance, {}, add new mq failed, {}", consumerGroup, mq);
}
}
}
if (!allMQLocked) {
// Retry the rebalance shortly so locked-out queues can be adopted later.
mQClientFactory.rebalanceLater(500);
}
this.dispatchPullRequest(pullRequestList, 500);
}
{
// add new message queue
List<PopRequest> popRequestList = new ArrayList<>();
for (MessageQueue mq : mq2PopAssignment.keySet()) {
if (!this.popProcessQueueTable.containsKey(mq)) {
// Pop mode needs no client-side lock or offset computation; the broker manages both.
PopProcessQueue pq = createPopProcessQueue();
PopProcessQueue pre = this.popProcessQueueTable.putIfAbsent(mq, pq);
if (pre != null) {
log.info("doRebalance, {}, mq pop already exists, {}", consumerGroup, mq);
} else {
log.info("doRebalance, {}, add a new pop mq, {}", consumerGroup, mq);
PopRequest popRequest = new PopRequest();
popRequest.setTopic(topic);
popRequest.setConsumerGroup(consumerGroup);
popRequest.setMessageQueue(mq);
popRequest.setPopProcessQueue(pq);
popRequest.setInitMode(getConsumeInitMode());
popRequestList.add(popRequest);
changed = true;
}
}
}
this.dispatchPopPullRequest(popRequestList, 500);
}
return changed;
}
/**
 * Notifies the subclass that the queue assignment for {@code topic} changed.
 *
 * @param topic the topic whose assignment changed
 * @param mqAll all known queues of the topic; may be null when the broker computed
 * the assignment and the full set is unknown to the client
 * @param mqDivided the queues now assigned to this client
 */
public abstract void messageQueueChanged(final String topic, final Set<MessageQueue> mqAll,
final Set<MessageQueue> mqDivided);
/** Cleans up state for a queue being dropped; true when it may be removed from the table. */
public abstract boolean removeUnnecessaryMessageQueue(final MessageQueue mq, final ProcessQueue pq);
// Pop-mode counterpart of removeUnnecessaryMessageQueue; the default keeps no
// per-queue state to clean up, so removal is always allowed.
public boolean removeUnnecessaryPopMessageQueue(final MessageQueue mq, final PopProcessQueue pq) {
return true;
}
/** Whether this consumer pulls actively or is driven passively (push). */
public abstract ConsumeType consumeType();
/** Clears any locally cached offset for the queue before it is (re)adopted. */
public abstract void removeDirtyOffset(final MessageQueue mq);
/**
 * When the network is unstable, using this interface may return wrong offset.
 * It is recommended to use computePullFromWhereWithException instead.
 * @param mq
 * @return offset
 */
@Deprecated
public abstract long computePullFromWhere(final MessageQueue mq);
/** Like {@link #computePullFromWhere} but surfaces failures instead of returning a wrong offset. */
public abstract long computePullFromWhereWithException(final MessageQueue mq) throws MQClientException;
/** Initial consume mode used when building pop requests. */
public abstract int getConsumeInitMode();
/** Hands freshly created pull requests to the pull service after {@code delay} ms. */
public abstract void dispatchPullRequest(final List<PullRequest> pullRequestList, final long delay);
/** Hands freshly created pop requests to the pop service after {@code delay} ms. */
public abstract void dispatchPopPullRequest(final List<PopRequest> pullRequestList, final long delay);
/** Factory for the per-queue state object used in pull/push mode. */
public abstract ProcessQueue createProcessQueue();
/** Factory for the per-queue state object used in pop mode. */
public abstract PopProcessQueue createPopProcessQueue();
/**
 * Detaches a queue from this consumer (used for offset fix-up): the process queue
 * is dropped first so consumption stops, then its state is cleaned up.
 */
public void removeProcessQueue(final MessageQueue mq) {
    final ProcessQueue prev = this.processQueueTable.remove(mq);
    if (prev == null) {
        return;
    }
    final boolean droped = prev.isDropped();
    prev.setDropped(true);
    this.removeUnnecessaryMessageQueue(mq, prev);
    log.info("Fix Offset, {}, remove unnecessary mq, {} Droped: {}", consumerGroup, mq, droped);
}
/** Live pull/push process-queue table (queue to local consumption state); not a copy. */
public ConcurrentMap<MessageQueue, ProcessQueue> getProcessQueueTable() {
return processQueueTable;
}
/** Live pop process-queue table; not a copy. */
public ConcurrentMap<MessageQueue, PopProcessQueue> getPopProcessQueueTable() {
return popProcessQueueTable;
}
/** Live route table of topic to known queues; not a copy. */
public ConcurrentMap<String, Set<MessageQueue>> getTopicSubscribeInfoTable() {
return topicSubscribeInfoTable;
}
public String getConsumerGroup() {
return consumerGroup;
}
public void setConsumerGroup(String consumerGroup) {
this.consumerGroup = consumerGroup;
}
public MessageModel getMessageModel() {
return messageModel;
}
public void setMessageModel(MessageModel messageModel) {
this.messageModel = messageModel;
}
public AllocateMessageQueueStrategy getAllocateMessageQueueStrategy() {
return allocateMessageQueueStrategy;
}
public void setAllocateMessageQueueStrategy(AllocateMessageQueueStrategy allocateMessageQueueStrategy) {
this.allocateMessageQueueStrategy = allocateMessageQueueStrategy;
}
public MQClientInstance getmQClientFactory() {
return mQClientFactory;
}
public void setmQClientFactory(MQClientInstance mQClientFactory) {
this.mQClientFactory = mQClientFactory;
}
/**
 * Shuts this rebalancer down: marks every queue dropped so consumption stops,
 * then clears both process-queue tables.
 */
public void destroy() {
    for (Entry<MessageQueue, ProcessQueue> entry : this.processQueueTable.entrySet()) {
        entry.getValue().setDropped(true);
    }
    this.processQueueTable.clear();
    for (Entry<MessageQueue, PopProcessQueue> entry : this.popProcessQueueTable.entrySet()) {
        entry.getValue().setDropped(true);
    }
    this.popProcessQueueTable.clear();
}
}
|
apache/impala | 35,204 | fe/src/main/java/org/apache/impala/catalog/paimon/PaimonUtil.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.impala.catalog.paimon;
import static org.apache.impala.catalog.Table.isExternalPurgeTable;
import static org.apache.paimon.CoreOptions.PARTITION_DEFAULT_NAME;
import static org.apache.paimon.CoreOptions.PARTITION_GENERATE_LEGCY_NAME;
import static org.apache.paimon.utils.HadoopUtils.HADOOP_LOAD_DEFAULT_CONFIG;
import com.google.common.collect.Lists;
import org.apache.commons.lang3.SerializationUtils;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.metastore.api.BinaryColumnStatsData;
import org.apache.hadoop.hive.metastore.api.BooleanColumnStatsData;
import org.apache.hadoop.hive.metastore.api.ColumnStatisticsData;
import org.apache.hadoop.hive.metastore.api.DoubleColumnStatsData;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.LongColumnStatsData;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.metastore.api.StringColumnStatsData;
import org.apache.impala.catalog.Column;
import org.apache.impala.catalog.DatabaseNotFoundException;
import org.apache.impala.catalog.HdfsFileFormat;
import org.apache.impala.catalog.MetaStoreClientPool;
import org.apache.impala.catalog.Type;
import org.apache.impala.common.FileSystemUtil;
import org.apache.impala.common.ImpalaRuntimeException;
import org.apache.impala.common.PrintUtils;
import org.apache.impala.compat.MetastoreShim;
import org.apache.impala.thrift.TColumn;
import org.apache.impala.thrift.TDescribeHistoryParams;
import org.apache.impala.thrift.TGetTableHistoryResult;
import org.apache.impala.thrift.TGetTableHistoryResultItem;
import org.apache.impala.thrift.TPaimonCatalog;
import org.apache.impala.thrift.TPaimonTable;
import org.apache.impala.thrift.TPaimonTableKind;
import org.apache.impala.thrift.TResultSet;
import org.apache.impala.thrift.TResultSetMetadata;
import org.apache.impala.thrift.TShowFilesParams;
import org.apache.impala.thrift.TTableStats;
import org.apache.impala.util.TResultRowBuilder;
import org.apache.paimon.CoreOptions;
import org.apache.paimon.catalog.AbstractCatalog;
import org.apache.paimon.catalog.Catalog;
import org.apache.paimon.catalog.CatalogContext;
import org.apache.paimon.catalog.CatalogFactory;
import org.apache.paimon.catalog.CatalogUtils;
import org.apache.paimon.catalog.Identifier;
import org.apache.paimon.data.InternalRow;
import org.apache.paimon.data.Timestamp;
import org.apache.paimon.fs.FileIO;
import org.apache.paimon.fs.Path;
import org.apache.paimon.hive.HiveCatalog;
import org.apache.paimon.hive.HiveTypeUtils;
import org.apache.paimon.hive.LocationKeyExtractor;
import org.apache.paimon.hive.utils.HiveUtils;
import org.apache.paimon.options.CatalogOptions;
import org.apache.paimon.options.Options;
import org.apache.paimon.partition.Partition;
import org.apache.paimon.predicate.Predicate;
import org.apache.paimon.predicate.PredicateBuilder;
import org.apache.paimon.reader.RecordReader;
import org.apache.paimon.schema.Schema;
import org.apache.paimon.stats.ColStats;
import org.apache.paimon.table.FileStoreTable;
import org.apache.paimon.table.FileStoreTableFactory;
import org.apache.paimon.table.Table;
import org.apache.paimon.table.source.DataSplit;
import org.apache.paimon.table.source.DeletionFile;
import org.apache.paimon.table.source.RawFile;
import org.apache.paimon.table.source.ReadBuilder;
import org.apache.paimon.table.source.Split;
import org.apache.paimon.table.source.TableRead;
import org.apache.paimon.table.source.TableScan;
import org.apache.paimon.table.system.SystemTableLoader;
import org.apache.paimon.types.BigIntType;
import org.apache.paimon.types.BooleanType;
import org.apache.paimon.types.DataField;
import org.apache.paimon.types.DataTypeFamily;
import org.apache.paimon.types.DoubleType;
import org.apache.paimon.types.FloatType;
import org.apache.paimon.types.IntType;
import org.apache.paimon.types.RowType;
import org.apache.paimon.types.SmallIntType;
import org.apache.paimon.types.TinyIntType;
import org.apache.paimon.utils.InternalRowPartitionComputer;
import org.apache.thrift.TException;
import org.postgresql.shaded.com.ongres.scram.common.util.Preconditions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.time.Instant;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Consumer;
import java.util.stream.Collectors;
public class PaimonUtil {
final static Logger LOG = LoggerFactory.getLogger(PaimonUtil.class);
// Value of the HMS "storage_handler" table property that marks a Paimon table.
public static final String PAIMON_STORAGE_HANDLER =
"org.apache.paimon.hive.PaimonStorageHandler";
// HMS table property key holding the storage handler class.
public static final String STORAGE_HANDLER = "storage_handler";
// Table property selecting which Paimon catalog implementation backs the table.
public static final String PAIMON_CATALOG = "paimon.catalog";
public static final String HIVE_CATALOG = "hive";
public static final String PAIMON_PROPERTY_PREFIX = "";
// Table property: warehouse root for a Hadoop (filesystem) catalog.
public static final String PAIMON_HADOOP_CATALOG_LOCATION = "paimon.catalog_location";
// Table property: explicit filesystem location of the Paimon table.
public static final String PAIMON_TABLE_LOCATION = "paimon_location";
// Table property: explicit Paimon identifier, "table" or "db.table" form.
public static final String PAIMON_TABLE_IDENTIFIER = "paimon.table_identifier";
private static final HiveConf hiveConf_ = new HiveConf();
// NOTE(review): public mutable static, not referenced in this chunk — confirm it is
// actually used elsewhere before relying on it.
public static Catalog catalog_ = null;
// Metastore client implementation passed to Paimon's HiveCatalog.
private static final String metastoreClientClass_ =
"org.apache.hadoop.hive.metastore.HiveMetaStoreClient";
/**
 * Returns true if the given metastore table represents a Paimon table.
 *
 * Hive/Paimon versions are inconsistent about which Paimon-related fields get set
 * (e.g., HIVE-6548 changed the input format to null), so for maximum compatibility
 * every known indicator is checked: the storage-handler property, the input format,
 * and the serde library.
 */
public static boolean isPaimonTable(org.apache.hadoop.hive.metastore.api.Table msTbl) {
    Map<String, String> params = msTbl.getParameters();
    if (params != null
        && PAIMON_STORAGE_HANDLER.equals(params.getOrDefault(STORAGE_HANDLER, ""))) {
        return true;
    }
    StorageDescriptor sd = msTbl.getSd();
    if (sd == null) {
        return false;
    }
    if (HdfsFileFormat.PAIMON.inputFormat().equals(sd.getInputFormat())) {
        return true;
    }
    return sd.getSerdeInfo() != null
        && HdfsFileFormat.PAIMON.serializationLib().equals(
            sd.getSerdeInfo().getSerializationLib());
}
/**
 * Serializes the Paimon API table object of {@code paimonTable} into a byte buffer
 * (Java native serialization) for transport to the backend.
 */
public static ByteBuffer serialize(FePaimonTable paimonTable) throws IOException {
return ByteBuffer.wrap(SerializationUtils.serialize(paimonTable.getPaimonApiTable()));
}
/**
 * Deserializes a Paimon table previously produced by {@link #serialize}.
 * NOTE(review): this uses Java native deserialization; callers must only pass
 * buffers produced internally by serialize(), never data from an untrusted source.
 */
public static Table deserialize(ByteBuffer b) throws Exception {
return SerializationUtils.deserialize(b.array());
}
/**
 * Builds the Thrift representation of a Paimon table. The Paimon API table object
 * is shipped to the backend as a serialized (JNI-kind) payload.
 */
public static TPaimonTable getTPaimonTable(FePaimonTable paimonTable)
    throws IOException {
    TPaimonTable result = new TPaimonTable();
    result.setKind(TPaimonTableKind.JNI);
    result.setJni_tbl_obj(serialize(paimonTable));
    return result;
}
/**
 * Converts a Paimon row type to the equivalent Hive schema. Field names are
 * lower-cased to follow the metastore convention.
 */
public static List<FieldSchema> convertToHiveSchema(RowType schema)
    throws ImpalaRuntimeException {
    List<FieldSchema> fields = new ArrayList<>();
    for (DataField field : schema.getFields()) {
        String hiveTypeName = HiveTypeUtils.toTypeInfo(field.type()).getTypeName();
        fields.add(
            new FieldSchema(field.name().toLowerCase(), hiveTypeName, field.description()));
    }
    return fields;
}
/**
 * Converts a Paimon row type to the equivalent Impala column list. Column positions
 * follow the Paimon field order; names are lower-cased.
 */
public static List<Column> convertToImpalaSchema(RowType schema)
    throws ImpalaRuntimeException {
    List<Column> columns = new ArrayList<>();
    List<DataField> fields = schema.getFields();
    for (int pos = 0; pos < fields.size(); ++pos) {
        DataField field = fields.get(pos);
        Type colType = ImpalaTypeUtils.toImpalaType(field.type());
        columns.add(new Column(field.name().toLowerCase(), colType, pos));
    }
    return columns;
}
/**
 * Generates a Paimon schema from Thrift column definitions, optional partition
 * keys, and optional table options. Column names are lower-cased.
 */
public static Schema genPaimonSchema(List<TColumn> columns, List<String> partitionKeys,
    Map<String, String> options) {
    Schema.Builder builder = Schema.newBuilder();
    for (TColumn col : columns) {
        Type impalaType = Type.fromThrift(col.getColumnType());
        builder.column(
            col.getColumnName().toLowerCase(), ImpalaTypeUtils.fromImpalaType(impalaType));
    }
    if (!partitionKeys.isEmpty()) {
        builder.partitionKeys(partitionKeys);
    }
    if (!options.isEmpty()) {
        builder.options(options);
    }
    return builder.build();
}
/**
 * Returns the Paimon catalog implementation for the given catalog type.
 *
 * @param catalog catalog kind (HADOOP_CATALOG or HIVE_CATALOG)
 * @param isExternal for HIVE_CATALOG: whether to root the catalog at the external
 * warehouse directory instead of the managed one
 * @param warehouse_location warehouse root; required (non-null) for HADOOP_CATALOG
 * @throws ImpalaRuntimeException if the catalog cannot be created or the type is unknown
 */
public static Catalog getPaimonCatalog(TPaimonCatalog catalog, boolean isExternal,
    String warehouse_location) throws ImpalaRuntimeException {
    switch (catalog) {
        case HADOOP_CATALOG: {
            // Bug fix: the null check previously used Preconditions from
            // org.postgresql.shaded.com.ongres.scram.common.util — an accidental
            // compile-time dependency on a private class shaded inside the PostgreSQL
            // JDBC driver. Use the JDK equivalent instead.
            java.util.Objects.requireNonNull(
                warehouse_location, "warehouse location should not be null");
            CatalogContext context = CatalogContext.create(new Path(warehouse_location));
            return CatalogFactory.createCatalog(context);
        }
        case HIVE_CATALOG: {
            try {
                // Root the catalog at the managed or external HMS warehouse directory.
                String location = isExternal ?
                    hiveConf_.get(HiveConf.ConfVars.HIVE_METASTORE_WAREHOUSE_EXTERNAL.varname) :
                    hiveConf_.get(HiveConf.ConfVars.METASTOREWAREHOUSE.varname);
                Path path = new Path(location);
                Options catalogOptions = new Options();
                catalogOptions.set(CatalogOptions.WAREHOUSE, location);
                CatalogContext catalogContext = CatalogContext.create(catalogOptions);
                FileIO fileIO = FileIO.get(path, catalogContext);
                HiveCatalog externalWarehouseCatalog =
                    new HiveCatalog(fileIO, hiveConf_, metastoreClientClass_, location);
                return externalWarehouseCatalog;
            } catch (Exception ex) {
                throw new ImpalaRuntimeException("failed to create hive catalog : ", ex);
            }
        }
        default: throw new ImpalaRuntimeException("Unexpected catalog type: " + catalog);
    }
}
/**
 * Builds a Paimon identifier from an explicit database and table name.
 * Names are used verbatim (no case normalization here).
 */
public static Identifier getTableIdentifier(String dbName, String tableName) {
return new Identifier(dbName, tableName);
}
/**
 * Builds the Paimon table identifier for an HMS table. An explicit
 * "paimon.table_identifier" property wins (either "table" or "db.table" form);
 * otherwise the HMS database/table names, lower-cased, are used.
 */
public static Identifier getTableIdentifier(
    org.apache.hadoop.hive.metastore.api.Table msTable) {
    String name = msTable.getParameters().get(PAIMON_TABLE_IDENTIFIER);
    if (name == null || name.isEmpty()) {
        return getTableIdentifier(
            msTable.getDbName().toLowerCase(), msTable.getTableName().toLowerCase());
    }
    if (!name.contains(".")) {
        // Bare table name: assume the HMS database. Note the explicit identifier is
        // not lower-cased, unlike the fallback above — presumably intentional, since
        // the property is taken as authoritative.
        return getTableIdentifier(msTable.getDbName(), name);
    }
    // "db.table" form; any dot-separated parts beyond the second are ignored.
    String[] parts = name.split("\\.");
    return getTableIdentifier(parts[0], parts[1]);
}
/**
 * Converts Paimon per-column statistics into the HMS
 * {@link ColumnStatisticsData} representation for the given field.
 *
 * Supported mappings: integer numerics -> long stats, approximate numerics ->
 * double stats, character strings -> string stats, binary strings -> binary
 * stats, booleans -> boolean stats. Any other type is logged and yields
 * {@code Optional.empty()}.
 */
public static Optional<ColumnStatisticsData> convertColStats(
    ColStats<?> colStats, DataField dataField) {
  // Min/max/etc. are stored in serialized form; materialize them for this
  // field's type before reading.
  colStats.deserializeFieldsFromString(dataField.type());
  Set<DataTypeFamily> fieldFamilySet = dataField.type().getTypeRoot().getFamilies();
  if (fieldFamilySet.contains(DataTypeFamily.NUMERIC)) {
    if (fieldFamilySet.contains(DataTypeFamily.INTEGER_NUMERIC)) {
      return convertIntegerColStats(colStats, dataField);
    } else if (fieldFamilySet.contains(DataTypeFamily.APPROXIMATE_NUMERIC)) {
      return convertFloatingPointColStats(colStats, dataField);
    }
    return unsupportedColStats(dataField);
  } else if (fieldFamilySet.contains(DataTypeFamily.CHARACTER_STRING)) {
    return convertStringColStats(colStats);
  } else if (fieldFamilySet.contains(DataTypeFamily.BINARY_STRING)) {
    return convertBinaryColStats(colStats);
  } else if (dataField.type() instanceof BooleanType) {
    return convertBooleanColStats(colStats);
  }
  return unsupportedColStats(dataField);
}

// Logs and returns empty for a field type column stats cannot represent.
private static Optional<ColumnStatisticsData> unsupportedColStats(DataField dataField) {
  LOG.warn(String.format("Column stats doesn't support data type %s",
      dataField.type().asSQLString()));
  return Optional.empty();
}

// LONG_STATS: BIGINT/INT/SMALLINT/TINYINT all map to the HMS long stats shape.
private static Optional<ColumnStatisticsData> convertIntegerColStats(
    ColStats<?> colStats, DataField dataField) {
  LongColumnStatsData longColumnStatsData = new LongColumnStatsData();
  if (colStats.nullCount().isPresent()) {
    longColumnStatsData.setNumNulls(colStats.nullCount().getAsLong());
  }
  // min()/max() carry the boxed value matching the field's concrete type,
  // so the cast must follow the type check.
  if (dataField.type() instanceof BigIntType) {
    if (colStats.min().isPresent()) {
      longColumnStatsData.setLowValue((Long) colStats.min().get());
    }
    if (colStats.max().isPresent()) {
      longColumnStatsData.setHighValue((Long) colStats.max().get());
    }
  } else if (dataField.type() instanceof IntType) {
    if (colStats.min().isPresent()) {
      longColumnStatsData.setLowValue((Integer) colStats.min().get());
    }
    if (colStats.max().isPresent()) {
      longColumnStatsData.setHighValue((Integer) colStats.max().get());
    }
  } else if (dataField.type() instanceof SmallIntType) {
    if (colStats.min().isPresent()) {
      longColumnStatsData.setLowValue((Short) colStats.min().get());
    }
    if (colStats.max().isPresent()) {
      longColumnStatsData.setHighValue((Short) colStats.max().get());
    }
  } else if (dataField.type() instanceof TinyIntType) {
    if (colStats.min().isPresent()) {
      longColumnStatsData.setLowValue((Byte) colStats.min().get());
    }
    if (colStats.max().isPresent()) {
      longColumnStatsData.setHighValue((Byte) colStats.max().get());
    }
  } else {
    return unsupportedColStats(dataField);
  }
  if (colStats.distinctCount().isPresent()) {
    longColumnStatsData.setNumDVs(colStats.distinctCount().getAsLong());
  }
  ColumnStatisticsData columnStatisticsData = new ColumnStatisticsData();
  columnStatisticsData.setLongStats(longColumnStatsData);
  return Optional.of(columnStatisticsData);
}

// DOUBLE_STATS: DOUBLE and FLOAT map to the HMS double stats shape.
private static Optional<ColumnStatisticsData> convertFloatingPointColStats(
    ColStats<?> colStats, DataField dataField) {
  DoubleColumnStatsData doubleColumnStatsData = new DoubleColumnStatsData();
  if (colStats.nullCount().isPresent()) {
    doubleColumnStatsData.setNumNulls(colStats.nullCount().getAsLong());
  }
  if (dataField.type() instanceof DoubleType) {
    if (colStats.min().isPresent()) {
      doubleColumnStatsData.setLowValue((Double) colStats.min().get());
    }
    if (colStats.max().isPresent()) {
      doubleColumnStatsData.setHighValue((Double) colStats.max().get());
    }
  } else if (dataField.type() instanceof FloatType) {
    if (colStats.min().isPresent()) {
      doubleColumnStatsData.setLowValue((Float) colStats.min().get());
    }
    if (colStats.max().isPresent()) {
      doubleColumnStatsData.setHighValue((Float) colStats.max().get());
    }
  } else {
    return unsupportedColStats(dataField);
  }
  if (colStats.distinctCount().isPresent()) {
    doubleColumnStatsData.setNumDVs(colStats.distinctCount().getAsLong());
  }
  ColumnStatisticsData columnStatisticsData = new ColumnStatisticsData();
  columnStatisticsData.setDoubleStats(doubleColumnStatsData);
  return Optional.of(columnStatisticsData);
}

// STRING_STATS: null count plus average/max column lengths.
private static Optional<ColumnStatisticsData> convertStringColStats(ColStats<?> colStats) {
  StringColumnStatsData stringColumnStatsData = new StringColumnStatsData();
  if (colStats.nullCount().isPresent()) {
    stringColumnStatsData.setNumNulls(colStats.nullCount().getAsLong());
  }
  if (colStats.avgLen().isPresent()) {
    stringColumnStatsData.setAvgColLen(colStats.avgLen().getAsLong());
  }
  if (colStats.maxLen().isPresent()) {
    stringColumnStatsData.setMaxColLen(colStats.maxLen().getAsLong());
  }
  ColumnStatisticsData columnStatisticsData = new ColumnStatisticsData();
  columnStatisticsData.setStringStats(stringColumnStatsData);
  return Optional.of(columnStatisticsData);
}

// BINARY_STATS: same length-based shape as string stats.
private static Optional<ColumnStatisticsData> convertBinaryColStats(ColStats<?> colStats) {
  BinaryColumnStatsData binaryColumnStatsData = new BinaryColumnStatsData();
  if (colStats.nullCount().isPresent()) {
    binaryColumnStatsData.setNumNulls(colStats.nullCount().getAsLong());
  }
  if (colStats.avgLen().isPresent()) {
    binaryColumnStatsData.setAvgColLen(colStats.avgLen().getAsLong());
  }
  if (colStats.maxLen().isPresent()) {
    binaryColumnStatsData.setMaxColLen(colStats.maxLen().getAsLong());
  }
  ColumnStatisticsData columnStatisticsData = new ColumnStatisticsData();
  columnStatisticsData.setBinaryStats(binaryColumnStatsData);
  return Optional.of(columnStatisticsData);
}

// BOOLEAN_STATS: only the null count is available from Paimon.
private static Optional<ColumnStatisticsData> convertBooleanColStats(ColStats<?> colStats) {
  BooleanColumnStatsData booleanColumnStatsData = new BooleanColumnStatsData();
  if (colStats.nullCount().isPresent()) {
    booleanColumnStatsData.setNumNulls(colStats.nullCount().getAsLong());
  }
  ColumnStatisticsData columnStatisticsData = new ColumnStatisticsData();
  columnStatisticsData.setBooleanStats(booleanColumnStatsData);
  return Optional.of(columnStatisticsData);
}
/**
 * Builds a {@link FileStoreTable} handle for a Paimon table registered in HMS.
 */
public static FileStoreTable createFileStoreTable(
    org.apache.hadoop.hive.metastore.api.Table table) throws MetaException {
  Options options = HiveUtils.extractCatalogConfig(FePaimonTable.jobConf);
  options.set(CoreOptions.PATH,
      LocationKeyExtractor.getPaimonLocation(FePaimonTable.jobConf, table));
  // Optionally layer in the Hadoop default configuration, mirroring Paimon's
  // HADOOP_LOAD_DEFAULT_CONFIG switch.
  CatalogContext catalogContext = options.get(HADOOP_LOAD_DEFAULT_CONFIG)
      ? CatalogContext.create(options, FePaimonTable.jobConf)
      : CatalogContext.create(options);
  return FileStoreTableFactory.create(catalogContext);
}
/**
 * Builds a {@link FileStoreTable} handle for the table stored at the given
 * filesystem location.
 */
public static FileStoreTable createFileStoreTable(String tableLocation)
    throws MetaException {
  Options options = HiveUtils.extractCatalogConfig(FePaimonTable.jobConf);
  options.set(CoreOptions.PATH, tableLocation);
  // Optionally layer in the Hadoop default configuration, mirroring Paimon's
  // HADOOP_LOAD_DEFAULT_CONFIG switch.
  CatalogContext catalogContext = options.get(HADOOP_LOAD_DEFAULT_CONFIG)
      ? CatalogContext.create(options, FePaimonTable.jobConf)
      : CatalogContext.create(options);
  return FileStoreTableFactory.create(catalogContext);
}
/**
 * Returns the lower-cased names of all top-level fields of the row type.
 */
public static List<String> fieldNames(RowType rowType) {
  return rowType.getFields()
      .stream()
      .map(DataField::name)
      .map(String::toLowerCase)
      .collect(Collectors.toList());
}
/**
 * Returns true if the given Paimon table declares at least one primary key.
 */
public static boolean hasPrimaryKey(Table table) {
  return !table.primaryKeys().isEmpty();
}
/**
 * Returns true if the given Paimon table declares at least one partition key.
 */
public static boolean hasPartition(Table table) {
  return !table.partitionKeys().isEmpty();
}
/**
 * Looks up rows matching the given predicates, capped at 1000 rows.
 */
public static List<InternalRow> lookupInTable(Table table, List<Predicate> predicates) {
  return lookupInTable(table, predicates, 1000);
}
/**
 * Looks up rows matching the given predicates, returning at most
 * {@code maxcount} rows. On read failure the error is logged and an empty
 * list is returned (best-effort lookup).
 */
protected static List<InternalRow> lookupInTable(
    Table table, List<Predicate> predicates, int maxcount) {
  ReadBuilder readBuilder = table.newReadBuilder().withFilter(predicates);
  List<Split> splits = readBuilder.newScan().plan().splits();
  TableRead read = readBuilder.newRead();
  final List<InternalRow> internalRows = Lists.newArrayList();
  try (RecordReader<InternalRow> recordReader = read.createReader(splits)) {
    // Drain ALL batches: the previous implementation only consumed the first
    // batch returned by readBatch(), silently dropping any remaining rows.
    // NOTE(review): batches are intentionally not released here (matching the
    // old behavior) because released batches may recycle their row objects —
    // confirm whether collected rows need defensive copies.
    RecordReader.RecordIterator<InternalRow> recordIterator;
    while (internalRows.size() < maxcount
        && (recordIterator = recordReader.readBatch()) != null) {
      InternalRow internalRow;
      while ((internalRow = recordIterator.next()) != null) {
        internalRows.add(internalRow);
        if (internalRows.size() >= maxcount) { break; }
      }
    }
  } catch (IOException ex) {
    LOG.warn("failed to read table", ex);
    return Lists.newArrayList();
  }
  return internalRows;
}
/**
 * Predicate push-down is only supported for {@link FileStoreTable}
 * implementations (system tables and other wrappers are excluded).
 */
public static boolean canApplyPredicatePushDown(Table table) {
  return table instanceof FileStoreTable;
}
/**
 * Queries the snapshot history of a Paimon table via its "snapshots" system
 * table, optionally restricted by a FROM timestamp or a BETWEEN range.
 *
 * @throws DatabaseNotFoundException wrapping any failure while reading the
 *     snapshot table
 */
public static TGetTableHistoryResult getPaimonTableHistory(FePaimonTable feTable,
    TDescribeHistoryParams params) throws DatabaseNotFoundException {
  try {
    // Column indexes for paimon snapshot table.
    // Used to select required columns from snapshot table.
    // 5: commit_time
    // 0: snapshot id
    // 1: schema_id
    final int[] SNAPSHOT_TABLE_PROJECTION = {5, 0, 1};
    TGetTableHistoryResult historyResult = new TGetTableHistoryResult();
    FileStoreTable table = (FileStoreTable) feTable.getPaimonApiTable();
    org.apache.paimon.table.Table snapshotTable =
        SystemTableLoader.load("snapshots", table);
    PredicateBuilder predicateBuilder = new PredicateBuilder(snapshotTable.rowType());
    // NOTE(review): the predicates target field index 0, which assumes the
    // filter is evaluated against the projected layout (commit_time first) —
    // confirm against Paimon's ReadBuilder#withFilter semantics.
    Optional<Predicate> predicateOpt = Optional.empty();
    if (params.isSetFrom_time()) {
      // DESCRIBE HISTORY <table> FROM <ts>: commit_time >= from_time
      predicateOpt = Optional.of(predicateBuilder.greaterOrEqual(
          0, Timestamp.fromEpochMillis(params.getFrom_time())));
    } else if (params.isSetBetween_start_time() && params.isSetBetween_end_time()) {
      // DESCRIBE HISTORY <table> BETWEEN <ts> AND <ts>
      predicateOpt = Optional.of(PredicateBuilder.and(
          predicateBuilder.greaterOrEqual(
              0, Timestamp.fromEpochMillis(params.getBetween_start_time())),
          predicateBuilder.lessOrEqual(
              0, Timestamp.fromEpochMillis(params.getBetween_end_time()))));
    }
    ReadBuilder readBuilder =
        snapshotTable.newReadBuilder().withProjection(SNAPSHOT_TABLE_PROJECTION);
    predicateOpt.ifPresent(readBuilder::withFilter);
    List<Split> splits = readBuilder.newScan().plan().splits();
    List<TGetTableHistoryResultItem> result =
        com.google.common.collect.Lists.newArrayList();
    // try-with-resources: the previous version leaked the RecordReader.
    try (RecordReader<InternalRow> reader =
        readBuilder.newRead().createReader(splits)) {
      reader.forEachRemaining(internalRow -> {
        TGetTableHistoryResultItem resultItem = new TGetTableHistoryResultItem();
        // Projected row layout: 0 = commit_time, 1 = snapshot_id, 2 = schema_id.
        long snapshotId = internalRow.getLong(1);
        Timestamp timestamp = internalRow.getTimestamp(0, 9);
        resultItem.setCreation_time(timestamp.getMillisecond());
        resultItem.setSnapshot_id(snapshotId);
        // note: parent id and ancestor id is always null
        result.add(resultItem);
      });
    }
    historyResult.setResult(result);
    return historyResult;
  } catch (Exception ex) {
    throw new DatabaseNotFoundException("Failed to get snapshot: " + ex.getMessage());
  }
}
/**
 * Maps a file-format string (typically from table properties) to an
 * HdfsFileFormat. Defaults to PARQUET when the value is null; returns null
 * for any value that is neither PARQUET nor ORC.
 */
public static HdfsFileFormat getPaimonFileFormat(String format) {
  if (format == null || "PARQUET".equalsIgnoreCase(format)) {
    return HdfsFileFormat.PARQUET;
  }
  return "ORC".equalsIgnoreCase(format) ? HdfsFileFormat.ORC : null;
}
/**
 * A table is synchronized table if its Managed table or if its a external table with
 * <code>external.table.purge</code> property set to true.
 * We need to create/drop/etc. synchronized tables through the Paimon APIs as well.
 */
public static boolean isSynchronizedTable(
    org.apache.hadoop.hive.metastore.api.Table msTbl) {
  // Only meaningful for Paimon tables; enforce that invariant up front.
  com.google.common.base.Preconditions.checkState(isPaimonTable(msTbl));
  return isManagedTable(msTbl) || isExternalPurgeTable(msTbl);
}
/**
 * Returns true if this metastore table has the MANAGED_TABLE table type.
 */
public static boolean isManagedTable(org.apache.hadoop.hive.metastore.api.Table msTbl) {
  return msTbl.getTableType().equalsIgnoreCase(TableType.MANAGED_TABLE.toString());
}
/**
 * Computes the storage location for a newly created Paimon table.
 *
 * For 'hadoop.catalog' tables the location is derived from the catalog's
 * warehouse property (SQL cannot set a location for those); otherwise the
 * standard HMS path for a new table in the database is used.
 */
public static String getPaimonCatalogLocation(
    MetaStoreClientPool.MetaStoreClient msClient,
    org.apache.hadoop.hive.metastore.api.Table msTable) throws TException {
  TPaimonCatalog catalog = getTPaimonCatalog(msTable);
  if (catalog == TPaimonCatalog.HADOOP_CATALOG) {
    // Using catalog location to create table
    // We cannot set location for 'hadoop.catalog' table in SQL
    String location = msTable.getParameters().get(PAIMON_HADOOP_CATALOG_LOCATION);
    Identifier table_identifier = getTableIdentifier(msTable);
    return AbstractCatalog.newTableLocation(location, table_identifier).toString();
  } else {
    // Using normal location as 'hadoop.tables' table location and create
    // table
    return MetastoreShim.getPathForNewTable(
        msClient.getHiveClient().getDatabase(msTable.getDbName()), msTable);
  }
}
/**
 * Returns the 'paimon.catalog_location' table property, used as the catalog
 * root when the table uses 'hadoop.catalog'. May be null if unset.
 */
public static String getPaimonCatalogLocation(
    org.apache.hadoop.hive.metastore.api.Table msTable) {
  return msTable.getParameters().get(PAIMON_HADOOP_CATALOG_LOCATION);
}
/**
 * Gets the Paimon catalog type from HMS table properties.
 * Defaults to the Hive catalog.
 */
public static TPaimonCatalog getTPaimonCatalog(
    org.apache.hadoop.hive.metastore.api.Table msTable) {
  return getTPaimonCatalog(msTable.getParameters());
}
/**
 * Gets the Paimon catalog type from a property map (PAIMON_CATALOG key).
 * Defaults to the Hive catalog.
 */
public static TPaimonCatalog getTPaimonCatalog(Map<String, String> props) {
  return getTPaimonCatalog(props.get(PAIMON_CATALOG));
}
/**
 * Maps a catalog name string to a Paimon catalog type.
 *
 * Only "hadoop" (case-insensitive) selects the Hadoop catalog; anything else
 * — including null, the Hive catalog name, or an unrecognized value — falls
 * back to the Hive catalog. (The previous version had an explicit Hive branch
 * and an else branch that returned the same value; they are collapsed here.)
 */
public static TPaimonCatalog getTPaimonCatalog(String catalog) {
  if ("hadoop".equalsIgnoreCase(catalog)) {
    return TPaimonCatalog.HADOOP_CATALOG;
  }
  return TPaimonCatalog.HIVE_CATALOG;
}
/**
 * Splits a comma-separated column list into lower-cased column names.
 * Whitespace around names is preserved as-is.
 */
public static List<String> extractColumnNames(String value) {
  List<String> names = new ArrayList<>();
  for (String name : value.split(",")) {
    names.add(name.toLowerCase());
  }
  return names;
}
/**
 * Creates a Paimon CatalogContext for an HMS table rooted at the given
 * location, layering the global Hive catalog config, the table path, and all
 * HMS table parameters.
 */
public static CatalogContext catalogContext(
    org.apache.hadoop.hive.metastore.api.Table table, String location) {
  Options options = HiveUtils.extractCatalogConfig(hiveConf_);
  options.set(CoreOptions.PATH, location);
  // Table-level properties override the global catalog configuration.
  table.getParameters().forEach(options::set);
  return CatalogContext.create(options, hiveConf_);
}
/**
 * Renders a partition spec map as a Hive-style partition path string, e.g.
 * {year=2024, month=05} -> "year=2024/month=05", following the map's
 * iteration order. (The previous javadoc was a copy-paste error describing
 * catalog-context creation.)
 */
public static String partitionSpecToString(final Map<String, String> spec) {
  // Iterate entries directly instead of keySet()+get() lookups.
  return spec.entrySet()
      .stream()
      .map(e -> e.getKey() + "=" + e.getValue())
      .collect(Collectors.joining("/"));
}
/**
 * Builds a two-column result set (row count / total bytes) with table-level
 * stats for the given Paimon table.
 */
public static TResultSet doGetTableStats(FePaimonTable table) {
  TResultSet result = new TResultSet();
  TResultSetMetadata resultSchema = new TResultSetMetadata();
  resultSchema.addToColumns(new TColumn("Number Of Rows", Type.BIGINT.toThrift()));
  resultSchema.addToColumns(new TColumn("Number Of Bytes", Type.BIGINT.toThrift()));
  result.setSchema(resultSchema);
  result.setRows(new ArrayList<>());
  TTableStats stats = table.getTTableStats();
  TResultRowBuilder row = new TResultRowBuilder();
  row.add(stats.getNum_rows());
  row.add(stats.getTotal_file_bytes());
  result.addToRows(row.get());
  return result;
}
/**
 * Converts an epoch-millis value that was produced from a LocalDateTime in
 * the JVM default time zone (i.e. local wall-clock millis rendered as if UTC)
 * into true UTC epoch millis by removing the zone offset.
 *
 * Fixes two defects in the previous version: it ADDED the offset
 * (local = UTC + offset, so UTC = local - offset), and it used the offset in
 * effect "now" rather than at the converted instant, which mis-adjusted
 * values across DST transitions.
 */
public static long localMillisToUTCMillis(long epochMillis) {
  ZoneId zone = ZoneId.systemDefault();
  ZoneOffset offset = zone.getRules().getOffset(Instant.ofEpochMilli(epochMillis));
  return epochMillis - offset.getTotalSeconds() * 1000L;
}
/**
 * Builds a result set with per-partition stats (spec, rows, files, bytes,
 * last file creation time) for the given Paimon table.
 */
public static TResultSet doGetPartitionStats(FePaimonTable table) {
  TResultSet result = new TResultSet();
  TResultSetMetadata resultSchema = new TResultSetMetadata();
  result.setSchema(resultSchema);
  result.setRows(new ArrayList<>());
  resultSchema.addToColumns(new TColumn("Partition", Type.STRING.toThrift()));
  resultSchema.addToColumns(new TColumn("Number Of Rows", Type.BIGINT.toThrift()));
  resultSchema.addToColumns(new TColumn("Number Of Files", Type.BIGINT.toThrift()));
  resultSchema.addToColumns(new TColumn("Number Of Bytes", Type.BIGINT.toThrift()));
  resultSchema.addToColumns(new TColumn("Last Creation Time", Type.BIGINT.toThrift()));
  List<Partition> partitions =
      CatalogUtils.listPartitionsFromFileSystem(table.getPaimonApiTable());
  for (Partition partition : partitions) {
    TResultRowBuilder builder = new TResultRowBuilder();
    builder.add(partitionSpecToString(partition.spec()));
    builder.add(partition.recordCount());
    builder.add(partition.fileCount());
    builder.add(partition.fileSizeInBytes());
    // The epoch millis obtained via the Paimon API come from a LocalDateTime
    // in the default time zone, so different zones yield different values.
    // Remove the default-zone offset to normalize to UTC.
    builder.add(localMillisToUTCMillis(partition.lastFileCreationTime()));
    result.addToRows(builder.get());
  }
  return result;
}
/**
 * Returns the position of the field with the given name (case-insensitive)
 * within the row type, or -1 when no such field exists.
 */
public static int getFieldIndexByNameIgnoreCase(RowType rowType, String fieldName) {
  int index = 0;
  for (DataField field : rowType.getFields()) {
    if (field.name().equalsIgnoreCase(fieldName)) {
      return index;
    }
    ++index;
  }
  return -1;
}
/**
 * Builds a predicate from a SHOW FILES partition set: each entry is a
 * conjunction of key=value constraints and the overall filter is the
 * disjunction of those conjunctions. Returns empty when no constraints exist.
 *
 * Removes an unused iterator local from the previous version and uses a
 * sequential stream (the input is small; parallelism bought nothing).
 */
private static Optional<Predicate> extractPartitionFilter(
    Table table, TShowFilesParams request) {
  PredicateBuilder predicateBuilder = new PredicateBuilder(table.rowType());
  List<Predicate> predicates =
      request.getPartition_set()
          .stream()
          .map(keyValues
              -> PredicateBuilder.and(
                  keyValues.stream()
                      .map(kv
                          -> predicateBuilder.equal(getFieldIndexByNameIgnoreCase(
                                 table.rowType(), kv.getName()),
                              kv.getValue()))
                      .collect(Collectors.toList())))
          .collect(Collectors.toList());
  if (predicates.isEmpty()) {
    return Optional.empty();
  }
  return Optional.of(PredicateBuilder.or(predicates));
}
/**
 * Implements SHOW FILES for a Paimon table: lists the data files (and any
 * deletion files) of every split, optionally restricted to a partition set.
 */
public static TResultSet doGetTableFiles(
    FePaimonTable paimon_table, TShowFilesParams request) {
  Table table = paimon_table.getPaimonApiTable();
  TResultSet result = new TResultSet();
  TResultSetMetadata resultSchema = new TResultSetMetadata();
  result.setSchema(resultSchema);
  resultSchema.addToColumns(new TColumn("Path", Type.STRING.toThrift()));
  resultSchema.addToColumns(new TColumn("Size", Type.STRING.toThrift()));
  resultSchema.addToColumns(new TColumn("Partition", Type.STRING.toThrift()));
  resultSchema.addToColumns(new TColumn("EC Policy", Type.STRING.toThrift()));
  resultSchema.addToColumns(new TColumn("Type", Type.STRING.toThrift()));
  ReadBuilder readBuilder = table.newReadBuilder();
  result.setRows(new ArrayList<>());
  // Narrow the scan when a partition set was supplied.
  if (request.isSetPartition_set()) {
    Optional<Predicate> predicate = extractPartitionFilter(table, request);
    if (predicate.isPresent()) {
      readBuilder = readBuilder.withFilter(predicate.get());
    }
  }
  TableScan.Plan plan = readBuilder.newScan().plan();
  Options options = Options.fromMap(table.options());
  // Reconstructs the k=v partition spec from each split's partition row.
  InternalRowPartitionComputer computer =
      new InternalRowPartitionComputer(options.get(PARTITION_DEFAULT_NAME),
          table.rowType(), table.partitionKeys().toArray(new String[0]),
          options.get(PARTITION_GENERATE_LEGCY_NAME));
  for (Split split : plan.splits()) {
    // Only DataSplits expose raw files.
    if (!(split instanceof DataSplit)) continue;
    DataSplit dataSplit = (DataSplit) split;
    Optional<List<RawFile>> rawFiles = dataSplit.convertToRawFiles();
    if (rawFiles.isPresent()) {
      for (RawFile rawFile : rawFiles.get()) {
        TResultRowBuilder builder = new TResultRowBuilder();
        builder.add(rawFile.path());
        builder.add(PrintUtils.printBytes(rawFile.fileSize()));
        builder.add(
            partitionSpecToString(computer.generatePartValues(dataSplit.partition())));
        builder.add(FileSystemUtil.getErasureCodingPolicy(
            new org.apache.hadoop.fs.Path(rawFile.path())));
        builder.add("DATA");
        result.addToRows(builder.get());
      }
    }
    // Deletion files attached to the split (deletion vectors), if any.
    if (split.deletionFiles().isPresent()) {
      for (DeletionFile deletionFile : split.deletionFiles().get()) {
        // NOTE(review): a null entry aborts the whole deletion-file loop
        // (break, not continue) — confirm this is intentional.
        if (deletionFile == null) break;
        TResultRowBuilder builder = new TResultRowBuilder();
        builder.add(deletionFile.path());
        builder.add(PrintUtils.printBytes(deletionFile.length()));
        builder.add(
            partitionSpecToString(computer.generatePartValues(dataSplit.partition())));
        builder.add(FileSystemUtil.getErasureCodingPolicy(
            new org.apache.hadoop.fs.Path(deletionFile.path())));
        builder.add("DELETE");
        result.addToRows(builder.get());
      }
    }
  }
  return result;
}
}
|
apache/tomcat | 34,999 | java/org/apache/tomcat/util/http/parser/HttpParser.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tomcat.util.http.parser;
import java.io.IOException;
import java.io.Reader;
import org.apache.tomcat.util.res.StringManager;
/**
* HTTP header value parser implementation. Parsing HTTP headers as per RFC2616 is not always as simple as it first
* appears. For headers that only use tokens the simple approach will normally be sufficient. However, for the other
* headers, while simple code meets 99.9% of cases, there are often some edge cases that make things far more
* complicated. The purpose of this parser is to let the parser worry about the edge cases. It provides tolerant (where
* safe to do so) parsing of HTTP header values assuming that wrapped header lines have already been unwrapped. (The
* Tomcat header processing code does the unwrapping.)
*/
public class HttpParser {
private static final StringManager sm = StringManager.getManager(HttpParser.class);

// All lookup tables cover the ASCII range only; the accessor methods treat
// any code point outside [0, ARRAY_SIZE) as invalid for the class in question.
private static final int ARRAY_SIZE = 128;

private static final boolean[] IS_CONTROL = new boolean[ARRAY_SIZE];
private static final boolean[] IS_SEPARATOR = new boolean[ARRAY_SIZE];
private static final boolean[] IS_TOKEN = new boolean[ARRAY_SIZE];
private static final boolean[] IS_HEX = new boolean[ARRAY_SIZE];
private static final boolean[] IS_HTTP_PROTOCOL = new boolean[ARRAY_SIZE];
private static final boolean[] IS_ALPHA = new boolean[ARRAY_SIZE];
private static final boolean[] IS_NUMERIC = new boolean[ARRAY_SIZE];
private static final boolean[] IS_SCHEME = new boolean[ARRAY_SIZE];
private static final boolean[] IS_UNRESERVED = new boolean[ARRAY_SIZE];
private static final boolean[] IS_SUBDELIM = new boolean[ARRAY_SIZE];
private static final boolean[] IS_USERINFO = new boolean[ARRAY_SIZE];
private static final boolean[] IS_RELAXABLE = new boolean[ARRAY_SIZE];

// Parser instance with no relaxed characters, backing the static helpers.
private static final HttpParser DEFAULT;
// Populate the shared ASCII classification tables once at class load.
static {
    for (int i = 0; i < ARRAY_SIZE; i++) {
        // Control> 0-31, 127
        if (i < 32 || i == 127) {
            IS_CONTROL[i] = true;
        }
        // Separator
        if (i == '(' || i == ')' || i == '<' || i == '>' || i == '@' || i == ',' || i == ';' || i == ':' ||
                i == '\\' || i == '\"' || i == '/' || i == '[' || i == ']' || i == '?' || i == '=' || i == '{' ||
                i == '}' || i == ' ' || i == '\t') {
            IS_SEPARATOR[i] = true;
        }
        // Token: Anything 0-127 that is not a control and not a separator
        if (!IS_CONTROL[i] && !IS_SEPARATOR[i]) {
            IS_TOKEN[i] = true;
        }
        // Hex: 0-9, a-f, A-F
        if ((i >= '0' && i <= '9') || (i >= 'a' && i <= 'f') || (i >= 'A' && i <= 'F')) {
            IS_HEX[i] = true;
        }
        // Not valid for HTTP protocol
        // "HTTP/" DIGIT "." DIGIT
        if (i == 'H' || i == 'T' || i == 'P' || i == '/' || i == '.' || (i >= '0' && i <= '9')) {
            IS_HTTP_PROTOCOL[i] = true;
        }
        if (i >= '0' && i <= '9') {
            IS_NUMERIC[i] = true;
        }
        if (i >= 'a' && i <= 'z' || i >= 'A' && i <= 'Z') {
            IS_ALPHA[i] = true;
        }
        // scheme = ALPHA *( ALPHA / DIGIT / "+" / "-" / "." )  (RFC 3986)
        if (IS_ALPHA[i] || IS_NUMERIC[i] || i == '+' || i == '-' || i == '.') {
            IS_SCHEME[i] = true;
        }
        // unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~"  (RFC 3986)
        if (IS_ALPHA[i] || IS_NUMERIC[i] || i == '-' || i == '.' || i == '_' || i == '~') {
            IS_UNRESERVED[i] = true;
        }
        // sub-delims (RFC 3986)
        if (i == '!' || i == '$' || i == '&' || i == '\'' || i == '(' || i == ')' || i == '*' || i == '+' ||
                i == ',' || i == ';' || i == '=') {
            IS_SUBDELIM[i] = true;
        }
        // userinfo = *( unreserved / pct-encoded / sub-delims / ":" )
        if (IS_UNRESERVED[i] || i == '%' || IS_SUBDELIM[i] || i == ':') {
            IS_USERINFO[i] = true;
        }
        // The characters that are normally not permitted for which the
        // restrictions may be relaxed when used in the path and/or query
        // string
        if (i == '\"' || i == '<' || i == '>' || i == '[' || i == '\\' || i == ']' || i == '^' || i == '`' ||
                i == '{' || i == '|' || i == '}') {
            IS_RELAXABLE[i] = true;
        }
    }

    DEFAULT = new HttpParser(null, null);
}
// Per-instance tables: request-target/path/query validity can be relaxed via
// configuration, so these cannot be shared statics.
private final boolean[] IS_NOT_REQUEST_TARGET = new boolean[ARRAY_SIZE];
private final boolean[] IS_ABSOLUTEPATH_RELAXED = new boolean[ARRAY_SIZE];
private final boolean[] IS_QUERY_RELAXED = new boolean[ARRAY_SIZE];

/**
 * Creates a parser whose path/query validation optionally permits additional
 * characters from the relaxable set.
 *
 * @param relaxedPathChars  extra characters to allow in the path, may be null
 * @param relaxedQueryChars extra characters to allow in the query, may be null
 */
public HttpParser(String relaxedPathChars, String relaxedQueryChars) {
    for (int i = 0; i < ARRAY_SIZE; i++) {
        // Not valid for request target.
        // Combination of multiple rules from RFC7230 and RFC 3986. Must be
        // ASCII, no controls plus a few additional characters excluded
        if (IS_CONTROL[i] || i == ' ' || i == '\"' || i == '#' || i == '<' || i == '>' || i == '\\' || i == '^' ||
                i == '`' || i == '{' || i == '|' || i == '}') {
            IS_NOT_REQUEST_TARGET[i] = true;
        }
        /*
         * absolute-path = 1*( "/" segment ) segment = *pchar pchar = unreserved / pct-encoded / sub-delims / ":" /
         * "@"
         *
         * Note pchar allows everything userinfo allows plus "@"
         */
        if (IS_USERINFO[i] || i == '@' || i == '/') {
            IS_ABSOLUTEPATH_RELAXED[i] = true;
        }
        /*
         * query = *( pchar / "/" / "?" )
         *
         * Note query allows everything absolute-path allows plus "?"
         */
        if (IS_ABSOLUTEPATH_RELAXED[i] || i == '?') {
            IS_QUERY_RELAXED[i] = true;
        }
    }

    // Apply the configured extra characters on top of the RFC rules.
    relax(IS_ABSOLUTEPATH_RELAXED, relaxedPathChars);
    relax(IS_QUERY_RELAXED, relaxedQueryChars);
}
/**
 * Tests whether c is NOT a valid request-target character under this
 * parser's (possibly relaxed) rules.
 */
public boolean isNotRequestTargetRelaxed(int c) {
    // Explicit bounds check instead of catching AIOOBE: anything outside the
    // ASCII table (including -1/EOF) is not a valid request-target character.
    if (c < 0 || c >= ARRAY_SIZE) {
        return true;
    }
    return IS_NOT_REQUEST_TARGET[c];
}
/**
 * Tests whether c is a valid absolute-path character under this parser's
 * (possibly relaxed) rules.
 */
public boolean isAbsolutePathRelaxed(int c) {
    // Bounds-checked table lookup; anything outside 0-127 is invalid.
    return c >= 0 && c < ARRAY_SIZE && IS_ABSOLUTEPATH_RELAXED[c];
}
/**
 * Tests whether c is a valid query-string character under this parser's
 * (possibly relaxed) rules.
 */
public boolean isQueryRelaxed(int c) {
    // Bounds-checked table lookup; anything outside 0-127 is invalid.
    return c >= 0 && c < ARRAY_SIZE && IS_QUERY_RELAXED[c];
}
/**
 * Removes optional surrounding double quotes and resolves backslash escapes
 * inside the value. Returns the input unchanged when it is null or shorter
 * than two characters; returns null when the (unquoted) value ends with a
 * lone '\'.
 */
public static String unquote(String input) {
    if (input == null || input.length() < 2) {
        return input;
    }

    // Skip surrounding quotes if there are any
    boolean quoted = input.charAt(0) == '"';
    int start = quoted ? 1 : 0;
    int end = quoted ? input.length() - 1 : input.length();

    StringBuilder result = new StringBuilder();
    int i = start;
    while (i < end) {
        char c = input.charAt(i);
        if (c == '\\') {
            i++;
            if (i == end) {
                // Input (less surrounding quotes) ended with '\' - invalid.
                return null;
            }
            result.append(input.charAt(i));
        } else {
            result.append(c);
        }
        i++;
    }
    return result.toString();
}
/**
 * Tests whether c is an RFC 7230 tchar.
 */
public static boolean isToken(int c) {
    // Bounds-checked table lookup; anything outside 0-127 is not a tchar.
    return c >= 0 && c < ARRAY_SIZE && IS_TOKEN[c];
}
/**
 * Is the provided String a token as per RFC 7230? <br>
 * Note: token = 1 * tchar (RFC 7230) <br>
 * Since a token requires at least 1 tchar, {@code null} and the empty string ({@code ""}) are not considered to be
 * valid tokens.
 *
 * @param s The string to test
 *
 * @return {@code true} if the string is a valid token, otherwise {@code false}
 */
public static boolean isToken(String s) {
    if (s == null || s.isEmpty()) {
        return false;
    }
    // Every character must be a tchar.
    return s.chars().allMatch(HttpParser::isToken);
}
/**
 * Tests whether c is a hexadecimal digit (0-9, a-f, A-F).
 */
public static boolean isHex(int c) {
    // Bounds-checked table lookup; anything outside 0-127 is not hex.
    return c >= 0 && c < ARRAY_SIZE && IS_HEX[c];
}
/**
 * Checks c against the default (non-relaxed) request-target rules.
 */
public static boolean isNotRequestTarget(int c) {
    return DEFAULT.isNotRequestTargetRelaxed(c);
}
/**
 * Tests whether c may appear in an HTTP protocol version
 * ("HTTP/" DIGIT "." DIGIT).
 */
public static boolean isHttpProtocol(int c) {
    // Bounds-checked table lookup; anything outside 0-127 is invalid.
    return c >= 0 && c < ARRAY_SIZE && IS_HTTP_PROTOCOL[c];
}
/**
 * Tests whether c is an ASCII letter (a-z, A-Z).
 */
public static boolean isAlpha(int c) {
    // Bounds-checked table lookup; anything outside 0-127 is not alpha.
    return c >= 0 && c < ARRAY_SIZE && IS_ALPHA[c];
}
/**
 * Tests whether c is an ASCII digit (0-9).
 */
public static boolean isNumeric(int c) {
    // Bounds-checked table lookup; anything outside 0-127 is not a digit.
    return c >= 0 && c < ARRAY_SIZE && IS_NUMERIC[c];
}
/**
 * Tests whether c is a valid RFC 3986 scheme character
 * (ALPHA / DIGIT / "+" / "-" / ".").
 */
public static boolean isScheme(int c) {
    // Bounds-checked table lookup; anything outside 0-127 is invalid.
    return c >= 0 && c < ARRAY_SIZE && IS_SCHEME[c];
}
/**
 * Is the provided String a scheme as per RFC 3986? <br>
 * Note: scheme = ALPHA *( ALPHA / DIGIT / "+" / "-" / "." ) <br>
 * Since a scheme requires at least 1 ALPHA, {@code null} and the empty string ({@code ""}) are not considered to be
 * valid tokens.
 *
 * @param s The string to test
 *
 * @return {@code true} if the string is a valid scheme, otherwise {@code false}
 */
public static boolean isScheme(String s) {
    if (s == null || s.isEmpty()) {
        return false;
    }
    // First character must be a letter ...
    if (!isAlpha(s.charAt(0))) {
        return false;
    }
    // ... and the remainder must be scheme characters.
    for (int i = 1; i < s.length(); i++) {
        if (!isScheme(s.charAt(i))) {
            return false;
        }
    }
    return true;
}
/**
 * Tests whether c is a valid RFC 3986 userinfo character
 * (unreserved / pct-encoded / sub-delims / ":").
 */
public static boolean isUserInfo(int c) {
    // Bounds-checked table lookup; anything outside 0-127 is invalid.
    return c >= 0 && c < ARRAY_SIZE && IS_USERINFO[c];
}
/**
 * Tests whether c belongs to the set of characters whose path/query
 * restrictions may be relaxed by configuration.
 */
private static boolean isRelaxable(int c) {
    // Bounds-checked table lookup; anything outside 0-127 is not relaxable.
    return c >= 0 && c < ARRAY_SIZE && IS_RELAXABLE[c];
}
/**
 * Checks c against the default (non-relaxed) absolute-path rules.
 */
public static boolean isAbsolutePath(int c) {
    return DEFAULT.isAbsolutePathRelaxed(c);
}
/**
 * Checks c against the default (non-relaxed) query-string rules.
 */
public static boolean isQuery(int c) {
    return DEFAULT.isQueryRelaxed(c);
}
/**
 * Tests whether c is an ASCII control character (0-31 or 127).
 */
public static boolean isControl(int c) {
    // Bounds-checked table lookup; anything outside 0-127 is not a control.
    return c >= 0 && c < ARRAY_SIZE && IS_CONTROL[c];
}
/**
 * Skips any LWS (SP, HTAB, CR, LF) and positions the reader at the next
 * non-LWS character without consuming it. Returning that character allows
 * callers to 'peek()' and avoid a second read in some cases.
 */
static int skipLws(Reader input) throws IOException {

    input.mark(1);
    int c = input.read();

    while (c == 32 || c == 9 || c == 10 || c == 13) {
        input.mark(1);
        c = input.read();
    }

    // Push the non-LWS character back so the caller can consume it.
    input.reset();
    return c;
}
/**
 * Attempts to consume the given constant from the reader after skipping LWS.
 *
 * @return FOUND if the constant was fully read, NOT_FOUND if the input
 *             differs (the reader is reset to just after the LWS), or EOF if
 *             there was no data to read at all
 */
static SkipResult skipConstant(Reader input, String constant) throws IOException {
    int len = constant.length();

    skipLws(input);
    // Mark so a mismatch can rewind past everything read for the constant.
    input.mark(len);
    int c = input.read();

    for (int i = 0; i < len; i++) {
        if (i == 0 && c == -1) {
            return SkipResult.EOF;
        }
        if (c != constant.charAt(i)) {
            input.reset();
            return SkipResult.NOT_FOUND;
        }
        // Don't read past the final matched character.
        if (i != (len - 1)) {
            c = input.read();
        }
    }
    return SkipResult.FOUND;
}
/**
 * @return the token if one was found, the empty string if no data was available to read or <code>null</code> if
 *             data other than a token was found
 */
static String readToken(Reader input) throws IOException {
    StringBuilder result = new StringBuilder();

    skipLws(input);
    input.mark(1);
    int c = input.read();

    // Accumulate tchar characters until EOF or a non-token character.
    while (c != -1 && isToken(c)) {
        result.append((char) c);
        input.mark(1);
        c = input.read();
    }
    // Use mark(1)/reset() rather than skip(-1) since skip() is a NOP
    // once the end of the String has been reached.
    input.reset();

    // Data was present but the first character was not a tchar.
    if (c != -1 && result.isEmpty()) {
        return null;
    } else {
        return result.toString();
    }
}
/**
 * @return the digits if any were found, the empty string if no data was found or if data other than digits was
 *             found
 */
static String readDigits(Reader input) throws IOException {
    StringBuilder result = new StringBuilder();

    skipLws(input);
    input.mark(1);
    int c = input.read();

    // Accumulate consecutive ASCII digits.
    while (c != -1 && isNumeric(c)) {
        result.append((char) c);
        input.mark(1);
        c = input.read();
    }
    // Use mark(1)/reset() rather than skip(-1) since skip() is a NOP
    // once the end of the String has been reached.
    input.reset();

    return result.toString();
}
/**
 * @return the number if digits were found, -1 if no data was found or if data other than digits was found
 */
static long readLong(Reader input) throws IOException {
    String digits = readDigits(input);
    // Empty result means no digits (or non-digit data) were present.
    return digits.isEmpty() ? -1 : Long.parseLong(digits);
}
/**
 * @return the quoted string if one was found, null if data other than a quoted string was found or null if the end
 *             of data was reached before the quoted string was terminated
 */
static String readQuotedString(Reader input, boolean returnQuoted) throws IOException {
    skipLws(input);
    if (input.read() != '"') {
        // Not a quoted string.
        return null;
    }
    StringBuilder quoted = new StringBuilder();
    if (returnQuoted) {
        quoted.append('"');
    }
    for (int c = input.read(); c != '"'; c = input.read()) {
        if (c == -1) {
            // EOF before the closing quote.
            return null;
        }
        if (c == '\\') {
            // Escaped character: the next character is taken literally.
            c = input.read();
            if (returnQuoted) {
                quoted.append('\\');
            }
        }
        quoted.append((char) c);
    }
    if (returnQuoted) {
        quoted.append('"');
    }
    return quoted.toString();
}
// Reads either a token or a quoted string, deciding by peeking at the next
// non-LWS character (skipLws() leaves it unread).
static String readTokenOrQuotedString(Reader input, boolean returnQuoted) throws IOException {
    return skipLws(input) == '"' ? readQuotedString(input, returnQuoted) : readToken(input);
}
/**
 * Token can be read unambiguously with or without surrounding quotes so this parsing method for token permits
 * optional surrounding double quotes. This is not defined in any RFC. It is a special case to handle data from
 * buggy clients (known buggy clients for DIGEST auth include Microsoft IE 8 &amp; 9, Apple Safari for OSX and iOS)
 * that add quotes to values that should be tokens.
 *
 * @return the token if one was found, null if data other than a token or quoted token was found or null if the end
 *             of data was reached before a quoted token was terminated
 */
static String readQuotedToken(Reader input) throws IOException {
    StringBuilder token = new StringBuilder();
    skipLws(input);
    input.mark(1);
    int c = input.read();
    boolean quoted = c == '"';
    if (!quoted) {
        if (c == -1 || !isToken(c)) {
            return null;
        }
        token.append((char) c);
    }
    input.mark(1);
    c = input.read();
    while (c != -1 && isToken(c)) {
        token.append((char) c);
        input.mark(1);
        c = input.read();
    }
    if (quoted) {
        // A quoted token must be terminated by the closing quote.
        if (c != '"') {
            return null;
        }
    } else {
        // Push the terminator back. mark(1)/reset() rather than skip(-1):
        // skip() is a NOP once EOF has been reached.
        input.reset();
    }
    if (c != -1 && token.length() == 0) {
        // Data was present but it was not a (quoted) token.
        return null;
    }
    return token.toString();
}
/**
 * LHEX can be read unambiguously with or without surrounding quotes so this parsing method for LHEX permits
 * optional surrounding double quotes. Some buggy clients (libwww-perl for DIGEST auth) are known to send quoted
 * LHEX when the specification requires just LHEX.
 * <p>
 * LHEX are, literally, lower-case hexadecimal digits. This implementation allows for upper-case digits as well,
 * converting the returned value to lower-case.
 *
 * @return the sequence of LHEX (minus any surrounding quotes) if any was found, or <code>null</code> if data other
 *             LHEX was found
 */
static String readLhex(Reader input) throws IOException {
    StringBuilder lhex = new StringBuilder();
    skipLws(input);
    input.mark(1);
    int c = input.read();
    boolean quoted = c == '"';
    if (!quoted) {
        if (c == -1 || !isHex(c)) {
            return null;
        }
        // Fold upper-case hex digits to lower-case.
        if (c >= 'A' && c <= 'F') {
            c = c - 'A' + 'a';
        }
        lhex.append((char) c);
    }
    input.mark(1);
    c = input.read();
    while (c != -1 && isHex(c)) {
        if (c >= 'A' && c <= 'F') {
            c = c - 'A' + 'a';
        }
        lhex.append((char) c);
        input.mark(1);
        c = input.read();
    }
    if (quoted) {
        // The opening quote must be matched by a closing quote.
        if (c != '"') {
            return null;
        }
    } else {
        // Push the terminator back. mark(1)/reset() rather than skip(-1):
        // skip() is a NOP once EOF has been reached.
        input.reset();
    }
    if (c != -1 && lhex.length() == 0) {
        return null;
    }
    return lhex.toString();
}
/**
 * Reads a quality value ("q" parameter, RFC 7231 section 5.3.1) from the
 * input. The parser is deliberately tolerant: malformed q values cause the
 * remaining input up to the next delimiter to be skipped and a weight of
 * zero to be returned, so the entry is dropped rather than failing the
 * whole header parse.
 *
 * @param input     the reader, positioned where the optional q value starts
 * @param delimiter the character that terminates this entry (e.g. ',')
 * @return the quality value in the range [0,1]; 1 if no q value is present,
 *         0 if the q value is malformed or greater than 1
 */
static double readWeight(Reader input, char delimiter) throws IOException {
    skipLws(input);
    int c = input.read();
    if (c == -1 || c == delimiter) {
        // No q value just whitespace
        return 1;
    } else if (c != 'q') {
        // Malformed. Use quality of zero so it is dropped.
        skipUntil(input, c, delimiter);
        return 0;
    }
    // RFC 7231 does not allow whitespace here but be tolerant
    skipLws(input);
    c = input.read();
    if (c != '=') {
        // Malformed. Use quality of zero so it is dropped.
        skipUntil(input, c, delimiter);
        return 0;
    }
    // RFC 7231 does not allow whitespace here but be tolerant
    skipLws(input);
    c = input.read();
    // Should be no more than 3 decimal places
    StringBuilder value = new StringBuilder(5);
    // -1 = no decimal point seen yet; >= 0 counts digits after the point
    int decimalPlacesRead = -1;
    if (c == '0' || c == '1') {
        value.append((char) c);
        c = input.read();
        while (true) {
            if (decimalPlacesRead == -1 && c == '.') {
                value.append('.');
                decimalPlacesRead = 0;
            } else if (decimalPlacesRead > -1 && c >= '0' && c <= '9') {
                // Digits beyond the third decimal place are read but ignored
                if (decimalPlacesRead < 3) {
                    value.append((char) c);
                    decimalPlacesRead++;
                }
            } else {
                break;
            }
            c = input.read();
        }
    } else {
        // Malformed. Use quality of zero so it is dropped and skip until
        // EOF or the next delimiter
        skipUntil(input, c, delimiter);
        return 0;
    }
    // Tolerate optional trailing whitespace (HTAB or SP) before the delimiter
    if (c == 9 || c == 32) {
        skipLws(input);
        c = input.read();
    }
    // Must be at delimiter or EOF
    if (c != delimiter && c != -1) {
        // Malformed. Use quality of zero so it is dropped and skip until
        // EOF or the next delimiter
        skipUntil(input, c, delimiter);
        return 0;
    }
    double result = Double.parseDouble(value.toString());
    if (result > 1) {
        // q values greater than 1 are invalid: drop the entry
        return 0;
    }
    return result;
}
/**
 * Parses a (candidate) IPv4 address from the reader. If the content turns
 * out not to be a valid IPv4 literal (non-numeric characters, bad octets)
 * and we are not inside an IPv6 literal, the reader is rewound and the
 * content is re-parsed as a host / FQDN instead.
 *
 * @param reader the source; must support mark/reset over the whole content
 * @param inIPv6 {@code true} when parsing the embedded IPv4 part of an
 *                   IPv6 literal (stricter: no fallback to FQDN parsing)
 * @return If inIPv6 is false, the position of ':' that separates the host from the port or -1 if it is not present.
 *             If inIPv6 is true, the number of characters read
 * @throws IllegalArgumentException if the host is malformed
 */
static int readHostIPv4(Reader reader, boolean inIPv6) throws IOException {
    // Value of the octet currently being parsed; -1 means no digit seen yet
    int octet = -1;
    int octetCount = 1;
    int c;
    int pos = 0;
    // readAheadLimit doesn't matter as all the readers passed to this
    // method buffer the entire content.
    reader.mark(1);
    do {
        c = reader.read();
        if (c == '.') {
            if (octet > -1 && octet < 256) {
                // Valid
                octetCount++;
                octet = -1;
            } else if (inIPv6 || octet == -1) {
                throw new IllegalArgumentException(sm.getString("http.invalidOctet", Integer.toString(octet)));
            } else {
                // Might not be an IPv4 address. Could be a host / FQDN with
                // a fully numeric component.
                reader.reset();
                return readHostDomainName(reader);
            }
        } else if (isNumeric(c)) {
            if (octet == -1) {
                octet = c - '0';
            } else if (octet == 0) {
                // Leading zero in non-zero octet. Not valid (ambiguous).
                if (inIPv6) {
                    throw new IllegalArgumentException(sm.getString("http.invalidLeadingZero"));
                } else {
                    // Could be a host/FQDN
                    reader.reset();
                    return readHostDomainName(reader);
                }
            } else {
                octet = octet * 10 + c - '0';
                // Avoid overflow
                if (octet > 255) {
                    break;
                }
            }
        } else if (c == ':') {
            // Host/port separator (when not inside an IPv6 literal)
            break;
        } else if (c == -1) {
            if (inIPv6) {
                throw new IllegalArgumentException(sm.getString("http.noClosingBracket"));
            } else {
                // EOF with no port separator
                pos = -1;
                break;
            }
        } else if (c == ']') {
            if (inIPv6) {
                // End of the enclosing IPv6 literal
                pos++;
                break;
            } else {
                throw new IllegalArgumentException(sm.getString("http.closingBracket"));
            }
        } else if (!inIPv6 && (isAlpha(c) || c == '-')) {
            // Go back to the start and parse as a host / FQDN
            reader.reset();
            return readHostDomainName(reader);
        } else {
            throw new IllegalArgumentException(
                    sm.getString("http.illegalCharacterIpv4", Character.toString((char) c)));
        }
        pos++;
    } while (true);
    if (octetCount != 4 || octet < 0 || octet > 255) {
        // Might not be an IPv4 address. Could be a host name or a FQDN with
        // fully numeric components. Go back to the start and parse as a
        // host / FQDN.
        reader.reset();
        return readHostDomainName(reader);
    }
    if (inIPv6) {
        return pos;
    } else {
        return validatePort(reader, pos);
    }
}
/**
 * Parses a bracketed IPv6 literal ("[...]"), validating hextet sizes, the
 * single-use "::" shorthand and an optional embedded IPv4 suffix.
 *
 * @param reader the source; must support mark/reset
 * @return The position of ':' that separates the host from the port or -1 if it is not present
 * @throws IllegalArgumentException if the IPv6 literal is malformed
 */
static int readHostIPv6(Reader reader) throws IOException {
    // Must start with '['
    int c = reader.read();
    if (c != '[') {
        throw new IllegalArgumentException(sm.getString("http.noOpeningBracket"));
    }
    // Number of 16-bit (hextet) blocks seen so far ("::" counts as one)
    int h16Count = 0;
    // Number of hex digits in the hextet currently being read (max 4)
    int h16Size = 0;
    int pos = 1;
    // Whether the single allowed "::" shorthand has been consumed
    boolean parsedDoubleColon = false;
    // Number of consecutive ':' characters immediately before this one
    int precedingColonsCount = 0;
    do {
        c = reader.read();
        if (h16Count == 0 && precedingColonsCount == 1 && c != ':') {
            // Can't start with a single :
            throw new IllegalArgumentException(sm.getString("http.singleColonStart"));
        }
        if (isHex(c)) {
            if (h16Size == 0) {
                // Start of a new h16 block
                precedingColonsCount = 0;
                h16Count++;
            }
            h16Size++;
            if (h16Size > 4) {
                throw new IllegalArgumentException(sm.getString("http.invalidHextet"));
            }
        } else if (c == ':') {
            if (precedingColonsCount >= 2) {
                // ::: is not allowed
                throw new IllegalArgumentException(sm.getString("http.tooManyColons"));
            } else {
                if (precedingColonsCount == 1) {
                    // End of ::
                    if (parsedDoubleColon) {
                        // Only allowed one :: sequence
                        throw new IllegalArgumentException(sm.getString("http.tooManyDoubleColons"));
                    }
                    parsedDoubleColon = true;
                    // :: represents at least one h16 block
                    h16Count++;
                }
                precedingColonsCount++;
                // mark if the next symbol is hex before the actual read
                reader.mark(4);
            }
            h16Size = 0;
        } else if (c == ']') {
            if (precedingColonsCount == 1) {
                // Can't end on a single ':'
                throw new IllegalArgumentException(sm.getString("http.singleColonEnd"));
            }
            pos++;
            break;
        } else if (c == '.') {
            // A '.' means the final block is an embedded IPv4 address;
            // only valid in the last position (h16Count == 7) or after "::"
            if (h16Count == 7 || h16Count < 7 && parsedDoubleColon) {
                // Rewind to the start of the IPv4 part (marked at the last
                // ':') and delegate to the IPv4 parser.
                reader.reset();
                pos -= h16Size;
                pos += readHostIPv4(reader, true);
                h16Count++;
                break;
            } else {
                throw new IllegalArgumentException(sm.getString("http.invalidIpv4Location"));
            }
        } else {
            throw new IllegalArgumentException(
                    sm.getString("http.illegalCharacterIpv6", Character.toString((char) c)));
        }
        pos++;
    } while (true);
    if (h16Count > 8) {
        throw new IllegalArgumentException(sm.getString("http.tooManyHextets", Integer.toString(h16Count)));
    } else if (h16Count != 8 && !parsedDoubleColon) {
        throw new IllegalArgumentException(sm.getString("http.tooFewHextets", Integer.toString(h16Count)));
    }
    // After ']' only an optional ":port" or EOF may follow
    c = reader.read();
    if (c == ':') {
        return validatePort(reader, pos);
    } else {
        if (c == -1) {
            return -1;
        }
        throw new IllegalArgumentException(sm.getString("http.illegalAfterIpv6", Character.toString((char) c)));
    }
}
/**
 * @return The position of ':' that separates the host from the port or -1 if it is not present
 */
static int readHostDomainName(Reader reader) throws IOException {
    int pos = 0;
    DomainParseState state = DomainParseState.NEW;
    // NEW always permits continuation, so the loop body runs at least once.
    do {
        state = state.next(reader.read());
        pos++;
    } while (state.mayContinue());
    if (state == DomainParseState.COLON) {
        // The state reflects the character just consumed, so the ':' sits
        // one position back from the current read position.
        return validatePort(reader, pos - 1);
    }
    return -1;
}
/**
 * Validates that everything after a host/port separator is a port: zero or
 * more digits followed by end of stream.
 *
 * @param reader        the source, positioned just after the ':' separator
 * @param colonPosition the position of the ':' to return on success
 * @return {@code colonPosition} if the remaining input is a valid port
 * @throws IllegalArgumentException if non-digit data follows the port
 */
static int validatePort(Reader reader, int colonPosition) throws IOException {
    // Remaining characters should be numeric ...
    readLong(reader);
    // ... followed by EOS
    if (reader.read() == -1) {
        return colonPosition;
    }
    // Include a diagnostic message rather than throwing a bare exception so
    // callers can tell why the authority was rejected.
    throw new IllegalArgumentException("Invalid port");
}
/**
 * Skips all characters until EOF or the specified target is found. Normally used to skip invalid input until the
 * next separator.
 */
static SkipResult skipUntil(Reader input, int c, char target) throws IOException {
    // 'c' is the character the caller has already read; start from it.
    while (c != target) {
        if (c == -1) {
            return SkipResult.EOF;
        }
        c = input.read();
    }
    return SkipResult.FOUND;
}
// Marks each relaxable character in relaxedChars as allowed in the given
// flag table and, correspondingly, as permitted in a request target.
private void relax(boolean[] flags, String relaxedChars) {
    if (relaxedChars == null || relaxedChars.isEmpty()) {
        return;
    }
    for (int i = 0; i < relaxedChars.length(); i++) {
        char c = relaxedChars.charAt(i);
        if (isRelaxable(c)) {
            flags[c] = true;
            IS_NOT_REQUEST_TARGET[c] = false;
        }
    }
}
/**
 * State machine for parsing a host / FQDN one character at a time. Each
 * constant describes the character just consumed; the boolean flags say
 * which characters may legally follow it. Constructor argument order:
 * (mayContinue, allowsHyphen, allowsPeriod, allowsEnd, errorMsg).
 */
private enum DomainParseState {
    // A hyphen may not start a label; a period may not follow a period or
    // start the name; COLON and END are terminal states.
    NEW(true, false, false, false, "http.invalidCharacterDomain.atStart"),
    ALPHA(true, true, true, true, "http.invalidCharacterDomain.afterLetter"),
    NUMERIC(true, true, true, true, "http.invalidCharacterDomain.afterNumber"),
    PERIOD(true, false, false, true, "http.invalidCharacterDomain.afterPeriod"),
    HYPHEN(true, true, false, false, "http.invalidCharacterDomain.afterHyphen"),
    COLON(false, false, false, false, "http.invalidCharacterDomain.afterColon"),
    END(false, false, false, false, "http.invalidCharacterDomain.atEnd");

    // Whether parsing may consume another character from this state
    private final boolean mayContinue;
    // Whether '-' is a legal next character
    private final boolean allowsHyphen;
    // Whether '.' is a legal next character
    private final boolean allowsPeriod;
    // Whether the name may legally end (EOF or ':') after this state
    private final boolean allowsEnd;
    // Resource key used when an illegal follow-up character is seen
    private final String errorMsg;

    DomainParseState(boolean mayContinue, boolean allowsHyphen, boolean allowsPeriod, boolean allowsEnd,
            String errorMsg) {
        this.mayContinue = mayContinue;
        this.allowsHyphen = allowsHyphen;
        this.allowsPeriod = allowsPeriod;
        this.allowsEnd = allowsEnd;
        this.errorMsg = errorMsg;
    }

    public boolean mayContinue() {
        return mayContinue;
    }

    /**
     * Transition on the next character {@code c} (-1 for EOF), throwing
     * {@link IllegalArgumentException} for any illegal transition.
     */
    public DomainParseState next(int c) {
        if (c == -1) {
            if (allowsEnd) {
                return END;
            } else {
                throw new IllegalArgumentException(sm.getString("http.invalidSegmentEndState", this.name()));
            }
        } else if (isAlpha(c)) {
            return ALPHA;
        } else if (isNumeric(c)) {
            return NUMERIC;
        } else if (c == '.') {
            if (allowsPeriod) {
                return PERIOD;
            } else {
                throw new IllegalArgumentException(sm.getString(errorMsg, Character.toString((char) c)));
            }
        } else if (c == ':') {
            if (allowsEnd) {
                return COLON;
            } else {
                throw new IllegalArgumentException(sm.getString(errorMsg, Character.toString((char) c)));
            }
        } else if (c == '-') {
            if (allowsHyphen) {
                return HYPHEN;
            } else {
                throw new IllegalArgumentException(sm.getString(errorMsg, Character.toString((char) c)));
            }
        } else {
            throw new IllegalArgumentException(
                    sm.getString("http.illegalCharacterDomain", Character.toString((char) c)));
        }
    }
}
}
|
googleapis/google-cloud-java | 35,001 | java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/SummarizationQualityInput.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/evaluation_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1;
/**
*
*
* <pre>
* Input for summarization quality metric.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.SummarizationQualityInput}
*/
public final class SummarizationQualityInput extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.SummarizationQualityInput)
SummarizationQualityInputOrBuilder {
// Java-serialization version marker for this generated message class.
private static final long serialVersionUID = 0L;

// Use SummarizationQualityInput.newBuilder() to construct.
private SummarizationQualityInput(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// No-arg constructor: leaves both message fields unset (presence bits 0).
private SummarizationQualityInput() {}

// Used by the protobuf runtime to create fresh instances reflectively.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new SummarizationQualityInput();
}
// Returns the protobuf descriptor for this message type, as declared in
// google/cloud/aiplatform/v1/evaluation_service.proto.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.aiplatform.v1.EvaluationServiceProto
      .internal_static_google_cloud_aiplatform_v1_SummarizationQualityInput_descriptor;
}

// Wires descriptor fields to the generated accessors for reflection-based
// access by the protobuf runtime.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.aiplatform.v1.EvaluationServiceProto
      .internal_static_google_cloud_aiplatform_v1_SummarizationQualityInput_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.aiplatform.v1.SummarizationQualityInput.class,
          com.google.cloud.aiplatform.v1.SummarizationQualityInput.Builder.class);
}
// Field-presence bits: bit 0 = metric_spec, bit 1 = instance.
private int bitField0_;

public static final int METRIC_SPEC_FIELD_NUMBER = 1;
private com.google.cloud.aiplatform.v1.SummarizationQualitySpec metricSpec_;

/**
 * <pre>
 * Required. Spec for summarization quality score metric.
 * </pre>
 *
 * <code>
 * .google.cloud.aiplatform.v1.SummarizationQualitySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return Whether the metricSpec field is set.
 */
@java.lang.Override
public boolean hasMetricSpec() {
  return ((bitField0_ & 0x00000001) != 0);
}

/**
 * <pre>
 * Required. Spec for summarization quality score metric.
 * </pre>
 *
 * <code>
 * .google.cloud.aiplatform.v1.SummarizationQualitySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return The metricSpec; never null — the default instance when unset.
 */
@java.lang.Override
public com.google.cloud.aiplatform.v1.SummarizationQualitySpec getMetricSpec() {
  return metricSpec_ == null
      ? com.google.cloud.aiplatform.v1.SummarizationQualitySpec.getDefaultInstance()
      : metricSpec_;
}

/**
 * <pre>
 * Required. Spec for summarization quality score metric.
 * </pre>
 *
 * <code>
 * .google.cloud.aiplatform.v1.SummarizationQualitySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
@java.lang.Override
public com.google.cloud.aiplatform.v1.SummarizationQualitySpecOrBuilder getMetricSpecOrBuilder() {
  return metricSpec_ == null
      ? com.google.cloud.aiplatform.v1.SummarizationQualitySpec.getDefaultInstance()
      : metricSpec_;
}
public static final int INSTANCE_FIELD_NUMBER = 2;
private com.google.cloud.aiplatform.v1.SummarizationQualityInstance instance_;

/**
 * <pre>
 * Required. Summarization quality instance.
 * </pre>
 *
 * <code>
 * .google.cloud.aiplatform.v1.SummarizationQualityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return Whether the instance field is set.
 */
@java.lang.Override
public boolean hasInstance() {
  return ((bitField0_ & 0x00000002) != 0);
}

/**
 * <pre>
 * Required. Summarization quality instance.
 * </pre>
 *
 * <code>
 * .google.cloud.aiplatform.v1.SummarizationQualityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return The instance; never null — the default instance when unset.
 */
@java.lang.Override
public com.google.cloud.aiplatform.v1.SummarizationQualityInstance getInstance() {
  return instance_ == null
      ? com.google.cloud.aiplatform.v1.SummarizationQualityInstance.getDefaultInstance()
      : instance_;
}

/**
 * <pre>
 * Required. Summarization quality instance.
 * </pre>
 *
 * <code>
 * .google.cloud.aiplatform.v1.SummarizationQualityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
@java.lang.Override
public com.google.cloud.aiplatform.v1.SummarizationQualityInstanceOrBuilder
    getInstanceOrBuilder() {
  return instance_ == null
      ? com.google.cloud.aiplatform.v1.SummarizationQualityInstance.getDefaultInstance()
      : instance_;
}
// Memoized result of isInitialized(): -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;

// No proto2 required fields or nested initialization constraints exist, so
// initialization always succeeds; the result is cached after first call.
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}
// Serializes the set fields, in field-number order, followed by any
// unknown fields carried over from parsing.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (((bitField0_ & 0x00000001) != 0)) {
    output.writeMessage(1, getMetricSpec());
  }
  if (((bitField0_ & 0x00000002) != 0)) {
    output.writeMessage(2, getInstance());
  }
  getUnknownFields().writeTo(output);
}

// Computes the wire size in bytes; the result is memoized in memoizedSize
// (-1 means not yet computed).
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (((bitField0_ & 0x00000001) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getMetricSpec());
  }
  if (((bitField0_ & 0x00000002) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getInstance());
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
// Value equality: field presence, field values and unknown fields must all
// match.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.aiplatform.v1.SummarizationQualityInput)) {
    return super.equals(obj);
  }
  com.google.cloud.aiplatform.v1.SummarizationQualityInput other =
      (com.google.cloud.aiplatform.v1.SummarizationQualityInput) obj;
  if (hasMetricSpec() != other.hasMetricSpec()) return false;
  if (hasMetricSpec()) {
    if (!getMetricSpec().equals(other.getMetricSpec())) return false;
  }
  if (hasInstance() != other.hasInstance()) return false;
  if (hasInstance()) {
    if (!getInstance().equals(other.getInstance())) return false;
  }
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}

// Hash consistent with equals(); mixes in field numbers of set fields and
// is memoized (0 means not yet computed).
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (hasMetricSpec()) {
    hash = (37 * hash) + METRIC_SPEC_FIELD_NUMBER;
    hash = (53 * hash) + getMetricSpec().hashCode();
  }
  if (hasInstance()) {
    hash = (37 * hash) + INSTANCE_FIELD_NUMBER;
    hash = (53 * hash) + getInstance().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard generated parseFrom overloads: deserialize a message from the
// supported input kinds (ByteBuffer, ByteString, byte[], InputStream,
// CodedInputStream), with or without an extension registry. The
// *delimitedFrom variants first read a varint length prefix.
public static com.google.cloud.aiplatform.v1.SummarizationQualityInput parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.aiplatform.v1.SummarizationQualityInput parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.aiplatform.v1.SummarizationQualityInput parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.aiplatform.v1.SummarizationQualityInput parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.aiplatform.v1.SummarizationQualityInput parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.aiplatform.v1.SummarizationQualityInput parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.aiplatform.v1.SummarizationQualityInput parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.aiplatform.v1.SummarizationQualityInput parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.aiplatform.v1.SummarizationQualityInput parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.aiplatform.v1.SummarizationQualityInput parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.aiplatform.v1.SummarizationQualityInput parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.aiplatform.v1.SummarizationQualityInput parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// Builder factory methods.
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

// Creates a builder initialized to the default (empty) message.
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

// Creates a builder pre-populated from the given prototype message.
public static Builder newBuilder(
    com.google.cloud.aiplatform.v1.SummarizationQualityInput prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  // Avoid a needless merge when converting the shared default instance.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* Input for summarization quality metric.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.SummarizationQualityInput}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.SummarizationQualityInput)
com.google.cloud.aiplatform.v1.SummarizationQualityInputOrBuilder {
// Descriptor for the message type built by this builder.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.aiplatform.v1.EvaluationServiceProto
      .internal_static_google_cloud_aiplatform_v1_SummarizationQualityInput_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.aiplatform.v1.EvaluationServiceProto
      .internal_static_google_cloud_aiplatform_v1_SummarizationQualityInput_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.aiplatform.v1.SummarizationQualityInput.class,
          com.google.cloud.aiplatform.v1.SummarizationQualityInput.Builder.class);
}

// Construct using com.google.cloud.aiplatform.v1.SummarizationQualityInput.newBuilder()
private Builder() {
  maybeForceBuilderInitialization();
}

private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  super(parent);
  maybeForceBuilderInitialization();
}

// Eagerly creates sub-field builders when the runtime requires it (i.e.
// when alwaysUseFieldBuilders is set, used for nested-builder support).
private void maybeForceBuilderInitialization() {
  if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
    getMetricSpecFieldBuilder();
    getInstanceFieldBuilder();
  }
}
// Resets the builder to the default (empty) state, disposing any
// sub-builders so they stop tracking this builder.
@java.lang.Override
public Builder clear() {
  super.clear();
  bitField0_ = 0;
  metricSpec_ = null;
  if (metricSpecBuilder_ != null) {
    metricSpecBuilder_.dispose();
    metricSpecBuilder_ = null;
  }
  instance_ = null;
  if (instanceBuilder_ != null) {
    instanceBuilder_.dispose();
    instanceBuilder_ = null;
  }
  return this;
}

@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
  return com.google.cloud.aiplatform.v1.EvaluationServiceProto
      .internal_static_google_cloud_aiplatform_v1_SummarizationQualityInput_descriptor;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.SummarizationQualityInput getDefaultInstanceForType() {
  return com.google.cloud.aiplatform.v1.SummarizationQualityInput.getDefaultInstance();
}

// Builds the message, failing if it is not initialized.
@java.lang.Override
public com.google.cloud.aiplatform.v1.SummarizationQualityInput build() {
  com.google.cloud.aiplatform.v1.SummarizationQualityInput result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}

// Builds the message without the initialization check.
@java.lang.Override
public com.google.cloud.aiplatform.v1.SummarizationQualityInput buildPartial() {
  com.google.cloud.aiplatform.v1.SummarizationQualityInput result =
      new com.google.cloud.aiplatform.v1.SummarizationQualityInput(this);
  if (bitField0_ != 0) {
    buildPartial0(result);
  }
  onBuilt();
  return result;
}

// Copies set fields (from either the plain field or its sub-builder) into
// the result and transfers the corresponding presence bits.
private void buildPartial0(com.google.cloud.aiplatform.v1.SummarizationQualityInput result) {
  int from_bitField0_ = bitField0_;
  int to_bitField0_ = 0;
  if (((from_bitField0_ & 0x00000001) != 0)) {
    result.metricSpec_ = metricSpecBuilder_ == null ? metricSpec_ : metricSpecBuilder_.build();
    to_bitField0_ |= 0x00000001;
  }
  if (((from_bitField0_ & 0x00000002) != 0)) {
    result.instance_ = instanceBuilder_ == null ? instance_ : instanceBuilder_.build();
    to_bitField0_ |= 0x00000002;
  }
  result.bitField0_ |= to_bitField0_;
}
// Boilerplate overrides delegating to GeneratedMessageV3.Builder; present
// so that each returns the concrete Builder type for fluent chaining.
@java.lang.Override
public Builder clone() {
  return super.clone();
}

@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.setField(field, value);
}

@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}

@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}

@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}

@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.addRepeatedField(field, value);
}
// Dispatches to the typed merge when possible, otherwise falls back to
// reflective merging.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (other instanceof com.google.cloud.aiplatform.v1.SummarizationQualityInput) {
    return mergeFrom((com.google.cloud.aiplatform.v1.SummarizationQualityInput) other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}

// Merges only the fields that are set on 'other'; message fields are merged
// recursively per protobuf merge semantics.
public Builder mergeFrom(com.google.cloud.aiplatform.v1.SummarizationQualityInput other) {
  if (other == com.google.cloud.aiplatform.v1.SummarizationQualityInput.getDefaultInstance())
    return this;
  if (other.hasMetricSpec()) {
    mergeMetricSpec(other.getMetricSpec());
  }
  if (other.hasInstance()) {
    mergeInstance(other.getInstance());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}

// Always initialized: no required proto2 fields exist on this message.
@java.lang.Override
public final boolean isInitialized() {
  return true;
}
// Parses the wire format tag-by-tag, merging known fields into their
// sub-builders and preserving unrecognized fields as unknown fields.
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          // Tag 0 marks end of input
          done = true;
          break;
        case 10:
          {
            // Field 1 (metric_spec), wire type 2 (length-delimited)
            input.readMessage(getMetricSpecFieldBuilder().getBuilder(), extensionRegistry);
            bitField0_ |= 0x00000001;
            break;
          } // case 10
        case 18:
          {
            // Field 2 (instance), wire type 2 (length-delimited)
            input.readMessage(getInstanceFieldBuilder().getBuilder(), extensionRegistry);
            bitField0_ |= 0x00000002;
            break;
          } // case 18
        default:
          {
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    onChanged();
  } // finally
  return this;
}
// Builder-side presence bits, mirroring the message's bitField0_ layout.
private int bitField0_;

private com.google.cloud.aiplatform.v1.SummarizationQualitySpec metricSpec_;
// Lazily-created sub-builder; once created it owns the field value and
// metricSpec_ is ignored.
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.cloud.aiplatform.v1.SummarizationQualitySpec,
        com.google.cloud.aiplatform.v1.SummarizationQualitySpec.Builder,
        com.google.cloud.aiplatform.v1.SummarizationQualitySpecOrBuilder>
    metricSpecBuilder_;

/**
 * <pre>
 * Required. Spec for summarization quality score metric.
 * </pre>
 *
 * <code>
 * .google.cloud.aiplatform.v1.SummarizationQualitySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return Whether the metricSpec field is set.
 */
public boolean hasMetricSpec() {
  return ((bitField0_ & 0x00000001) != 0);
}

/**
 * <pre>
 * Required. Spec for summarization quality score metric.
 * </pre>
 *
 * <code>
 * .google.cloud.aiplatform.v1.SummarizationQualitySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return The metricSpec; never null — the default instance when unset.
 */
public com.google.cloud.aiplatform.v1.SummarizationQualitySpec getMetricSpec() {
  if (metricSpecBuilder_ == null) {
    return metricSpec_ == null
        ? com.google.cloud.aiplatform.v1.SummarizationQualitySpec.getDefaultInstance()
        : metricSpec_;
  } else {
    return metricSpecBuilder_.getMessage();
  }
}
/**
*
*
* <pre>
* Required. Spec for summarization quality score metric.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.SummarizationQualitySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setMetricSpec(com.google.cloud.aiplatform.v1.SummarizationQualitySpec value) {
if (metricSpecBuilder_ == null) {
// null is rejected explicitly; protobuf message fields are never null
if (value == null) {
throw new NullPointerException();
}
metricSpec_ = value;
} else {
metricSpecBuilder_.setMessage(value);
}
// mark the field present and notify the parent builder of the change
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Spec for summarization quality score metric.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.SummarizationQualitySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setMetricSpec(
com.google.cloud.aiplatform.v1.SummarizationQualitySpec.Builder builderForValue) {
// the builder argument is materialized immediately; later mutations of
// builderForValue do not affect this message
if (metricSpecBuilder_ == null) {
metricSpec_ = builderForValue.build();
} else {
metricSpecBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Spec for summarization quality score metric.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.SummarizationQualitySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeMetricSpec(com.google.cloud.aiplatform.v1.SummarizationQualitySpec value) {
if (metricSpecBuilder_ == null) {
// merge field-by-field only when a non-default value is already present;
// otherwise adopt the incoming message outright
if (((bitField0_ & 0x00000001) != 0)
&& metricSpec_ != null
&& metricSpec_
!= com.google.cloud.aiplatform.v1.SummarizationQualitySpec.getDefaultInstance()) {
getMetricSpecBuilder().mergeFrom(value);
} else {
metricSpec_ = value;
}
} else {
metricSpecBuilder_.mergeFrom(value);
}
// only flag presence when a value actually ended up stored
if (metricSpec_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. Spec for summarization quality score metric.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.SummarizationQualitySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearMetricSpec() {
// drop the presence bit, the stored message, and any nested builder
bitField0_ = (bitField0_ & ~0x00000001);
metricSpec_ = null;
if (metricSpecBuilder_ != null) {
metricSpecBuilder_.dispose();
metricSpecBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Spec for summarization quality score metric.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.SummarizationQualitySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.aiplatform.v1.SummarizationQualitySpec.Builder getMetricSpecBuilder() {
// handing out a mutable builder implies the field will be considered set
bitField0_ |= 0x00000001;
onChanged();
return getMetricSpecFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. Spec for summarization quality score metric.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.SummarizationQualitySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.aiplatform.v1.SummarizationQualitySpecOrBuilder
getMetricSpecOrBuilder() {
if (metricSpecBuilder_ != null) {
return metricSpecBuilder_.getMessageOrBuilder();
} else {
// read-only view; default instance stands in for an unset field
return metricSpec_ == null
? com.google.cloud.aiplatform.v1.SummarizationQualitySpec.getDefaultInstance()
: metricSpec_;
}
}
/**
*
*
* <pre>
* Required. Spec for summarization quality score metric.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.SummarizationQualitySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1.SummarizationQualitySpec,
com.google.cloud.aiplatform.v1.SummarizationQualitySpec.Builder,
com.google.cloud.aiplatform.v1.SummarizationQualitySpecOrBuilder>
getMetricSpecFieldBuilder() {
// lazily create the nested builder; ownership of the current value moves
// into it, so the plain field is nulled afterwards
if (metricSpecBuilder_ == null) {
metricSpecBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1.SummarizationQualitySpec,
com.google.cloud.aiplatform.v1.SummarizationQualitySpec.Builder,
com.google.cloud.aiplatform.v1.SummarizationQualitySpecOrBuilder>(
getMetricSpec(), getParentForChildren(), isClean());
metricSpec_ = null;
}
return metricSpecBuilder_;
}
// Storage for the `instance` field (bit 0x2), mirroring metric_spec above.
private com.google.cloud.aiplatform.v1.SummarizationQualityInstance instance_;
// Lazily-created nested builder for `instance`.
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1.SummarizationQualityInstance,
com.google.cloud.aiplatform.v1.SummarizationQualityInstance.Builder,
com.google.cloud.aiplatform.v1.SummarizationQualityInstanceOrBuilder>
instanceBuilder_;
/**
*
*
* <pre>
* Required. Summarization quality instance.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.SummarizationQualityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the instance field is set.
*/
public boolean hasInstance() {
// presence bit 0x2; handled identically to metric_spec's bit 0x1
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. Summarization quality instance.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.SummarizationQualityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The instance.
*/
public com.google.cloud.aiplatform.v1.SummarizationQualityInstance getInstance() {
if (instanceBuilder_ == null) {
// never returns null: default instance stands in for an unset field
return instance_ == null
? com.google.cloud.aiplatform.v1.SummarizationQualityInstance.getDefaultInstance()
: instance_;
} else {
return instanceBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. Summarization quality instance.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.SummarizationQualityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setInstance(com.google.cloud.aiplatform.v1.SummarizationQualityInstance value) {
if (instanceBuilder_ == null) {
// protobuf message fields reject null explicitly
if (value == null) {
throw new NullPointerException();
}
instance_ = value;
} else {
instanceBuilder_.setMessage(value);
}
// mark present and propagate the change to the parent builder
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Summarization quality instance.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.SummarizationQualityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setInstance(
com.google.cloud.aiplatform.v1.SummarizationQualityInstance.Builder builderForValue) {
// the builder is materialized immediately; later mutations of
// builderForValue do not affect this message
if (instanceBuilder_ == null) {
instance_ = builderForValue.build();
} else {
instanceBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Summarization quality instance.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.SummarizationQualityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeInstance(
com.google.cloud.aiplatform.v1.SummarizationQualityInstance value) {
if (instanceBuilder_ == null) {
// merge field-by-field only when a non-default value is already set;
// otherwise adopt the incoming message outright
if (((bitField0_ & 0x00000002) != 0)
&& instance_ != null
&& instance_
!= com.google.cloud.aiplatform.v1.SummarizationQualityInstance
.getDefaultInstance()) {
getInstanceBuilder().mergeFrom(value);
} else {
instance_ = value;
}
} else {
instanceBuilder_.mergeFrom(value);
}
// only flag presence when a value actually ended up stored
if (instance_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. Summarization quality instance.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.SummarizationQualityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearInstance() {
// drop the presence bit, the stored message, and any nested builder
bitField0_ = (bitField0_ & ~0x00000002);
instance_ = null;
if (instanceBuilder_ != null) {
instanceBuilder_.dispose();
instanceBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Summarization quality instance.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.SummarizationQualityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.aiplatform.v1.SummarizationQualityInstance.Builder
getInstanceBuilder() {
// handing out a mutable builder implies the field will be considered set
bitField0_ |= 0x00000002;
onChanged();
return getInstanceFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. Summarization quality instance.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.SummarizationQualityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.aiplatform.v1.SummarizationQualityInstanceOrBuilder
getInstanceOrBuilder() {
if (instanceBuilder_ != null) {
return instanceBuilder_.getMessageOrBuilder();
} else {
// read-only view; default instance stands in for an unset field
return instance_ == null
? com.google.cloud.aiplatform.v1.SummarizationQualityInstance.getDefaultInstance()
: instance_;
}
}
/**
*
*
* <pre>
* Required. Summarization quality instance.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.SummarizationQualityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1.SummarizationQualityInstance,
com.google.cloud.aiplatform.v1.SummarizationQualityInstance.Builder,
com.google.cloud.aiplatform.v1.SummarizationQualityInstanceOrBuilder>
getInstanceFieldBuilder() {
// lazily create the nested builder; the current value moves into it,
// so the plain field is nulled afterwards
if (instanceBuilder_ == null) {
instanceBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1.SummarizationQualityInstance,
com.google.cloud.aiplatform.v1.SummarizationQualityInstance.Builder,
com.google.cloud.aiplatform.v1.SummarizationQualityInstanceOrBuilder>(
getInstance(), getParentForChildren(), isClean());
instance_ = null;
}
return instanceBuilder_;
}
@java.lang.Override
// Generated override delegating to the superclass; exists so the
// fluent return type is this concrete Builder.
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
// Generated override delegating to the superclass; preserves unknown
// fields read from the wire.
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.SummarizationQualityInput)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.SummarizationQualityInput)
// Shared immutable default instance; all unset message-typed fields
// resolve to this singleton.
private static final com.google.cloud.aiplatform.v1.SummarizationQualityInput DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.SummarizationQualityInput();
}
public static com.google.cloud.aiplatform.v1.SummarizationQualityInput getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Stateless singleton parser. All I/O failures are surfaced as
// InvalidProtocolBufferException with the partially-parsed message attached.
private static final com.google.protobuf.Parser<SummarizationQualityInput> PARSER =
new com.google.protobuf.AbstractParser<SummarizationQualityInput>() {
@java.lang.Override
public SummarizationQualityInput parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
// wrap plain I/O errors so callers only see protobuf exceptions
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
// Static accessor for the shared parser singleton.
public static com.google.protobuf.Parser<SummarizationQualityInput> parser() {
return PARSER;
}
@java.lang.Override
// Instance-level accessor required by the Message contract; same singleton.
public com.google.protobuf.Parser<SummarizationQualityInput> getParserForType() {
return PARSER;
}
@java.lang.Override
// Instance-level accessor required by the Message contract.
public com.google.cloud.aiplatform.v1.SummarizationQualityInput getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.openjpa.jdbc.schema;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Objects;
import org.apache.openjpa.jdbc.identifier.DBIdentifier;
import org.apache.openjpa.jdbc.sql.DBDictionary;
import org.apache.openjpa.lib.log.Log;
import org.apache.openjpa.lib.util.Localizer;
import org.apache.openjpa.lib.util.StringDistance;
import org.apache.openjpa.util.InvalidStateException;
/**
* Represents a database foreign key; may be a logical key with no
* database representation. This class can also represent a partial key,
* aligning with {@link DatabaseMetaData}.
*
* @author Abe White
*/
public class ForeignKey extends Constraint {
private static final long serialVersionUID = 1L;
/**
* Logical foreign key; links columns, but does not perform any action
* when the joined primary key columns are modified.
*/
public static final int ACTION_NONE = 1;
/**
* Throw an exception if joined primary key columns are modified.
*/
public static final int ACTION_RESTRICT = 2;
/**
* Cascade any modification of the joined primary key columns to
* this table. If the joined primary key row is deleted, the row in this
* table will also be deleted.
*/
public static final int ACTION_CASCADE = 3;
/**
* Null the local columns if the joined primary key columns are modified.
*/
public static final int ACTION_NULL = 4;
/**
* Set the local columns to their default values if the primary key
* columns are modified.
*/
public static final int ACTION_DEFAULT = 5;
private static final Localizer _loc =
Localizer.forPackage(ForeignKey.class);
// Partial-key state used before the joins are resolved to real columns.
private DBIdentifier _pkTableName = DBIdentifier.NULL;
private DBIdentifier _pkSchemaName = DBIdentifier.NULL;
private DBIdentifier _pkColumnName = DBIdentifier.NULL;
private int _seq = 0;
// local column -> pk column, and the inverse map (insertion-ordered)
private LinkedHashMap _joins = null;
private LinkedHashMap _joinsPK = null;
// local column -> constant value, and pk column -> constant value
private LinkedHashMap _consts = null;
private LinkedHashMap _constsPK = null;
private int _delAction = ACTION_NONE;
private int _upAction = ACTION_NONE;
private int _index = 0;
// cached items
private Column[] _locals = null;
private Column[] _pks = null;
private Object[] _constVals = null;
private Column[] _constCols = null;
private Object[] _constValsPK = null;
private Column[] _constColsPK = null;
private Table _pkTable = null;
// tri-state cache: null = not yet computed (see isPrimaryKeyAutoAssigned)
private Boolean _autoAssign = null;
/**
 * Return the foreign key action constant for the given action name.
 * A null or "none" name maps to {@link #ACTION_NONE}.
 *
 * @param name case-insensitive action name
 * @throws IllegalArgumentException if the name is not recognized; the
 * message suggests the closest known action when one is similar enough
 */
public static int getAction(String name) {
    if (name == null || "none".equalsIgnoreCase(name))
        return ACTION_NONE;
    if ("cascade".equalsIgnoreCase(name))
        return ACTION_CASCADE;
    if ("default".equalsIgnoreCase(name))
        return ACTION_DEFAULT;
    if ("restrict".equalsIgnoreCase(name)
        || "exception".equalsIgnoreCase(name))
        return ACTION_RESTRICT;
    if ("null".equalsIgnoreCase(name))
        return ACTION_NULL;
    // not a recognized action; check for typo (typed list instead of the
    // previous raw List built from an explicit String[] literal)
    List<String> recognized = Arrays.asList("none", "exception",
        "restrict", "cascade", "null", "default");
    String closest = StringDistance.getClosestLevenshteinDistance(name,
        recognized, .5F);
    String msg;
    if (closest != null)
        msg = _loc.get("bad-fk-action-hint", name, closest, recognized)
            .getMessage();
    else
        msg = _loc.get("bad-fk-action", name, recognized).getMessage();
    throw new IllegalArgumentException(msg);
}
/**
 * Return the foreign key action name for the given action constant.
 *
 * @param action one of the ACTION_* constants
 * @throws IllegalArgumentException if the constant is unknown
 */
public static String getActionName(int action) {
    if (action == ACTION_NONE)
        return "none";
    if (action == ACTION_RESTRICT)
        return "restrict";
    if (action == ACTION_CASCADE)
        return "cascade";
    if (action == ACTION_DEFAULT)
        return "default";
    if (action == ACTION_NULL)
        return "null";
    throw new IllegalArgumentException(String.valueOf(action));
}
/**
* Default constructor. Creates an unnamed foreign key that is not yet
* attached to a table.
*/
public ForeignKey() {
}
/**
* Constructor.
*
* @param name the foreign key name, if any
* @param table the local table of the foreign key
* @deprecated use {@link ForeignKey#ForeignKey(DBIdentifier, Table)}
* instead
*/
@Deprecated
public ForeignKey(String name, Table table) {
super(name, table);
}
/**
* Constructor.
*
* @param name the foreign key name, if any
* @param table the local table of the foreign key
*/
public ForeignKey(DBIdentifier name, Table table) {
super(name, table);
}
@Override
public boolean isLogical() {
// a key whose delete action is NONE has no database representation
return _delAction == ACTION_NONE;
}
/**
 * Whether the primary key columns of this key are auto-incrementing, or
 * whether they themselves are members of a foreign key who's primary key
 * is auto-incrementing (recursing to arbitrary depth).
 */
public boolean isPrimaryKeyAutoAssigned() {
    // use the cached answer when one has already been computed
    if (_autoAssign != null)
        return _autoAssign;
    // typed list (was a raw ArrayList) tracking visited keys to break
    // foreign key cycles during the recursive walk
    return isPrimaryKeyAutoAssigned(new ArrayList<ForeignKey>(3));
}
/**
* Helper to calculate whether this foreign key depends on auto-assigned
* columns. Recurses appropriately if the primary key columns this key
* joins to are themselves members of a foreign key that is dependent on
* auto-assigned columns. Caches calculated auto-assign value as a side
* effect.
*
* @param seen track seen foreign keys to prevent infinite recursion in
* the case of foreign key cycles
*/
private boolean isPrimaryKeyAutoAssigned(List seen) {
if (_autoAssign != null)
return _autoAssign;
Column[] cols = getPrimaryKeyColumns();
// no joins yet: nothing can be auto-assigned
if (cols.length == 0) {
_autoAssign = Boolean.FALSE;
return false;
}
// direct hit: one of the joined pk columns is itself auto-assigned
for (Column column : cols) {
if (column.isAutoAssigned()) {
_autoAssign = Boolean.TRUE;
return true;
}
}
// indirect: a pk column may belong to another fk whose own pk is
// auto-assigned; _pkTable is non-null here because joins exist
ForeignKey[] fks = _pkTable.getForeignKeys();
seen.add(this);
for (Column col : cols) {
for (ForeignKey fk : fks) {
if (!fk.containsColumn(col))
continue;
if (!seen.contains(fk)
&& fk.isPrimaryKeyAutoAssigned(seen)) {
_autoAssign = Boolean.TRUE;
return true;
}
}
}
_autoAssign = Boolean.FALSE;
return false;
}
/**
* The name of the primary key table.
*
* @deprecated use {@link #getPrimaryKeyTableIdentifier} instead
*/
@Deprecated
public String getPrimaryKeyTableName() {
return getPrimaryKeyTableIdentifier().getName();
}
/**
 * Return the identifier of the primary key table. Once a join has been
 * made, the joined table's own identifier takes precedence over any
 * name recorded before resolution.
 */
public DBIdentifier getPrimaryKeyTableIdentifier() {
    Table joined = getPrimaryKeyTable();
    if (joined != null)
        return joined.getIdentifier();
    if (_pkTableName == null)
        return DBIdentifier.NULL;
    return _pkTableName;
}
/**
* The name of the primary key table. You can only set the primary
* key table name on foreign keys that have not already been joined.
*
* @deprecated use {@link #setPrimaryKeyTableIdentifier} instead
*/
@Deprecated
public void setPrimaryKeyTableName(String pkTableName) {
setPrimaryKeyTableIdentifier(DBIdentifier.newTable(pkTableName));
}
/**
 * Record the primary key table name. Only legal while no join has been
 * made yet.
 *
 * @throws IllegalStateException if columns are already joined
 */
public void setPrimaryKeyTableIdentifier(DBIdentifier pkTableName) {
    if (getPrimaryKeyTable() != null) {
        throw new IllegalStateException();
    }
    _pkTableName = pkTableName;
}
/**
* The name of the primary key table's schema.
*
* @deprecated use {@link #getPrimaryKeySchemaIdentifier} instead
*/
@Deprecated
public String getPrimaryKeySchemaName() {
return getPrimaryKeySchemaIdentifier().getName();
}
/**
 * Return the schema identifier of the primary key table, preferring the
 * joined table's schema once a join has been made.
 */
public DBIdentifier getPrimaryKeySchemaIdentifier() {
    Table joined = getPrimaryKeyTable();
    return (joined == null) ? _pkSchemaName : joined.getSchemaIdentifier();
}
/**
* The name of the primary key table's schema. You can only set the
* primary key schema name on foreign keys that have not already been
* joined.
*
* @deprecated use {@link #setPrimaryKeySchemaIdentifier} instead
*/
@Deprecated
public void setPrimaryKeySchemaName(String pkSchemaName) {
setPrimaryKeySchemaIdentifier(DBIdentifier.newSchema(pkSchemaName));
}
/**
 * Record the primary key schema name. Only legal while no join has been
 * made yet.
 *
 * @throws IllegalStateException if columns are already joined
 */
public void setPrimaryKeySchemaIdentifier(DBIdentifier pkSchemaName) {
    if (getPrimaryKeyTable() != null) {
        throw new IllegalStateException();
    }
    _pkSchemaName = pkSchemaName;
}
/**
* The name of the primary key column.
*
* @deprecated use {@link #getPrimaryKeyColumnIdentifier} instead
*/
@Deprecated
public String getPrimaryKeyColumnName() {
return getPrimaryKeyColumnIdentifier().getName();
}
/**
 * Return the recorded primary key column name, never null (NULL
 * identifier stands in for an unset name).
 */
public DBIdentifier getPrimaryKeyColumnIdentifier() {
    if (_pkColumnName == null)
        return DBIdentifier.NULL;
    return _pkColumnName;
}
/**
* The name of the primary key column. You can only set the
* primary key column name on foreign keys that have not already been
* joined.
*
* @deprecated use {@link #setPrimaryKeyColumnIdentifier} instead
*/
@Deprecated
public void setPrimaryKeyColumnName(String pkColumnName) {
setPrimaryKeyColumnIdentifier(DBIdentifier.newColumn(pkColumnName));
}
/**
 * Record the primary key column name. Only legal while no join has been
 * made yet.
 *
 * @throws IllegalStateException if columns are already joined
 */
public void setPrimaryKeyColumnIdentifier(DBIdentifier pkColumnName) {
    if (getPrimaryKeyTable() != null) {
        throw new IllegalStateException();
    }
    _pkColumnName = pkColumnName;
}
/**
* The sequence of this join in the foreign key.
*
* @return the 0-based default, or the sequence assigned via
* {@link #setKeySequence}
*/
public int getKeySequence() {
return _seq;
}
/**
* The sequence of this join in the foreign key.
*/
public void setKeySequence(int seq) {
_seq = seq;
}
/**
* Return the delete action for the key. Will be one of:
* {@link #ACTION_NONE}, {@link #ACTION_RESTRICT},
* {@link #ACTION_CASCADE}, {@link #ACTION_NULL}, {@link #ACTION_DEFAULT}.
*/
public int getDeleteAction() {
return _delAction;
}
/**
* Set the delete action for the key. Must be one of:
* {@link #ACTION_NONE}, {@link #ACTION_RESTRICT},
* {@link #ACTION_CASCADE}, {@link #ACTION_NULL}, {@link #ACTION_DEFAULT}.
*/
public void setDeleteAction(int action) {
_delAction = action;
// keep delete/update actions consistent: NONE makes the key logical
// (both actions NONE); any physical delete action forces the update
// action to at least RESTRICT
if (action == ACTION_NONE)
_upAction = ACTION_NONE;
else if (_upAction == ACTION_NONE)
_upAction = ACTION_RESTRICT;
}
/**
* Return the update action for the key. Will be one of:
* {@link #ACTION_NONE}, {@link #ACTION_RESTRICT},
* {@link #ACTION_CASCADE}, {@link #ACTION_NULL}, {@link #ACTION_DEFAULT}.
*/
public int getUpdateAction() {
return _upAction;
}
/**
* Set the update action for the key. Must be one of:
* {@link #ACTION_NONE}, {@link #ACTION_RESTRICT},
* {@link #ACTION_CASCADE}, {@link #ACTION_NULL}, {@link #ACTION_DEFAULT}.
*/
public void setUpdateAction(int action) {
_upAction = action;
// mirror of setDeleteAction: NONE clears both, a physical update
// action forces the delete action to at least RESTRICT
if (action == ACTION_NONE)
_delAction = ACTION_NONE;
else if (_delAction == ACTION_NONE)
_delAction = ACTION_RESTRICT;
}
/**
* Return the foreign key's 0-based index in the owning table.
*/
public int getIndex() {
// ask the owning table to (re)assign fk indexes before reporting ours
Table table = getTable();
if (table != null)
table.indexForeignKeys();
return _index;
}
/**
* Set the foreign key's 0-based index in the owning table.
* Package-private: only the owning table assigns indexes.
*/
void setIndex(int index) {
_index = index;
}
/**
 * Return the primary key column joined to the given local column, or
 * null if the column is not part of this key.
 */
public Column getPrimaryKeyColumn(Column local) {
    if (_joins == null)
        return null;
    return (Column) _joins.get(local);
}
/**
 * Return the local column joined to the given primary key column, or
 * null if the column is not part of this key.
 */
public Column getColumn(Column pk) {
    if (_joinsPK == null)
        return null;
    return (Column) _joinsPK.get(pk);
}
/**
 * Return the constant value assigned to the given local column, or null
 * if none was assigned.
 */
public Object getConstant(Column local) {
    if (_consts == null)
        return null;
    return _consts.get(local);
}
/**
 * Return the constant value assigned to the given primary key column,
 * or null if none was assigned.
 */
public Object getPrimaryKeyConstant(Column pk) {
    if (_constsPK == null)
        return null;
    return _constsPK.get(pk);
}
/**
* Return the local columns in the foreign key local table order.
*/
public Column[] getColumns() {
// lazily cache; the raw-typed join map makes the explicit Column[]
// cast necessary. Cache is invalidated by join/removeJoin.
if (_locals == null)
_locals = (_joins == null) ? Schemas.EMPTY_COLUMNS : (Column[])
_joins.keySet().toArray(new Column[_joins.size()]);
return _locals;
}
/**
* Return the constant values assigned to the local columns
* returned by {@link #getConstantColumns}.
*/
public Object[] getConstants() {
// lazily cache; invalidated by joinConstant/removeJoin
if (_constVals == null)
_constVals = (_consts == null) ? Schemas.EMPTY_VALUES
: _consts.values().toArray();
return _constVals;
}
/**
* Return the constant values assigned to the primary key columns
* returned by {@link #getConstantPrimaryKeyColumns}.
*/
public Object[] getPrimaryKeyConstants() {
// lazily cache; invalidated by joinConstant/removeJoin
if (_constValsPK == null)
_constValsPK = (_constsPK == null) ? Schemas.EMPTY_VALUES
: _constsPK.values().toArray();
return _constValsPK;
}
/**
 * Return true if the fk includes the given local column.
 */
public boolean containsColumn(Column col) {
    if (_joins == null)
        return false;
    return _joins.containsKey(col);
}
/**
 * Return true if the fk includes the given primary key column.
 */
public boolean containsPrimaryKeyColumn(Column col) {
    if (_joinsPK == null)
        return false;
    return _joinsPK.containsKey(col);
}
/**
* Return true if the fk assigns a constant value to the given local
* column.
*/
public boolean containsConstantColumn(Column col) {
return _consts != null && _consts.containsKey(col);
}
/**
* Return true if the fk assigns a constant value to the given primary
* key column.
*/
public boolean containsConstantPrimaryKeyColumn(Column col) {
return _constsPK != null && _constsPK.containsKey(col);
}
/**
* Return the foreign columns in the foreign key, in join-order with
* the result of {@link #getColumns}.
*/
public Column[] getPrimaryKeyColumns() {
// lazily cache; the raw-typed join map makes the cast necessary
if (_pks == null)
_pks = (_joins == null) ? Schemas.EMPTY_COLUMNS : (Column[])
_joins.values().toArray(new Column[_joins.size()]);
return _pks;
}
/**
* Return the local columns that we link to using constant values.
*/
public Column[] getConstantColumns() {
// lazily cache; invalidated by joinConstant/removeJoin
if (_constCols == null)
_constCols = (_consts == null) ? Schemas.EMPTY_COLUMNS : (Column[])
_consts.keySet().toArray(new Column[_consts.size()]);
return _constCols;
}
/**
* Return the primary key columns that we link to using constant values.
*/
public Column[] getConstantPrimaryKeyColumns() {
// lazily cache; invalidated by joinConstant/removeJoin
if (_constColsPK == null)
_constColsPK = (_constsPK == null) ? Schemas.EMPTY_COLUMNS :
(Column[]) _constsPK.keySet().toArray
(new Column[_constsPK.size()]);
return _constColsPK;
}
/**
* Set the foreign key's joins. Replaces all current column joins.
* Assumes cols and pkCols are parallel arrays of equal length --
* TODO confirm callers guarantee this; no length check is made.
*/
public void setJoins(Column[] cols, Column[] pkCols) {
Column[] cur = getColumns();
for (Column column : cur) {
removeJoin(column);
}
if (cols != null)
for (int i = 0; i < cols.length; i++)
join(cols[i], pkCols[i]);
}
/**
* Set the foreign key's constant joins to primary key columns.
* Replaces all current constant pk joins; consts and pkCols are assumed
* parallel arrays.
*/
public void setConstantJoins(Object[] consts, Column[] pkCols) {
Column[] cur = getConstantPrimaryKeyColumns();
for (Column column : cur) {
removeJoin(column);
}
if (consts != null)
for (int i = 0; i < consts.length; i++)
joinConstant(consts[i], pkCols[i]);
}
/**
* Set the foreign key's constant joins to local columns.
* Replaces all current constant local joins; cols and consts are
* assumed parallel arrays.
*/
public void setConstantJoins(Column[] cols, Object[] consts) {
Column[] cur = getConstantColumns();
for (Column column : cur) {
removeJoin(column);
}
if (consts != null)
for (int i = 0; i < consts.length; i++)
joinConstant(cols[i], consts[i]);
}
/**
* Join a local column to a primary key column of another table.
*
* @throws InvalidStateException if the local column does not belong to
* this key's table, or if the pk column belongs to a different table
* than previous joins (all pk columns must share one table)
*/
public void join(Column local, Column toPK) {
if (!Objects.equals(local.getTable(), getTable()))
throw new InvalidStateException(_loc.get("table-mismatch",
local.getTable(), getTable()));
Table pkTable = toPK.getTable();
if (_pkTable != null && !_pkTable.equals(pkTable))
throw new InvalidStateException(_loc.get("fk-mismatch",
pkTable, _pkTable));
_pkTable = pkTable;
if (_joins == null)
_joins = new LinkedHashMap();
_joins.put(local, toPK);
local.addConstraint(this);
if (_joinsPK == null)
_joinsPK = new LinkedHashMap();
_joinsPK.put(toPK, local);
// force re-cache
_locals = null;
_pks = null;
// a cached negative auto-assign answer may be stale now that a new
// (possibly auto-assigned) pk column is joined
if (Boolean.FALSE.equals(_autoAssign))
_autoAssign = null;
}
/**
* Join a constant value to a primary key column of another table. The
* constant must be either a string or a number.
*
* @throws InvalidStateException if the pk column belongs to a different
* table than previous joins
*/
public void joinConstant(Object val, Column toPK) {
Table pkTable = toPK.getTable();
if (_pkTable != null && !_pkTable.equals(pkTable))
throw new InvalidStateException(_loc.get("fk-mismatch",
pkTable, _pkTable));
_pkTable = pkTable;
if (_constsPK == null)
_constsPK = new LinkedHashMap();
_constsPK.put(toPK, val);
// force re-cache
_constValsPK = null;
_constColsPK = null;
}
/**
* Join a constant value to a local column of this table. The
* constant must be either a string or a number.
*/
public void joinConstant(Column col, Object val) {
if (_consts == null)
_consts = new LinkedHashMap();
_consts.put(col, val);
// force re-cache
_constVals = null;
_constCols = null;
}
/**
* Remove any joins involving the given column, whether it participates
* as a local column, a primary key column, or a constant-joined column.
*
* @return true if the join was removed, false if not part of the key
*/
public boolean removeJoin(Column col) {
boolean remd = false;
Object rem;
if (_joins != null) {
rem = _joins.remove(col);
// NOTE(review): the constraint is deregistered from the column even
// when it was not actually part of this key -- confirm intentional
col.removeConstraint(this);
if (rem != null) {
_locals = null;
_pks = null;
_joinsPK.remove(rem);
remd = true;
}
}
if (_joinsPK != null) {
// col may also appear on the primary key side of a join
rem = _joinsPK.remove(col);
if (rem != null) {
_locals = null;
_pks = null;
_joins.remove(rem);
remd = true;
}
}
if (_consts != null) {
if (_consts.remove(col) != null) {
_constVals = null;
_constCols = null;
remd = true;
}
}
if (_constsPK != null) {
if (_constsPK.containsKey(col)) {
_constsPK.remove(col);
_constValsPK = null;
_constColsPK = null;
remd = true;
}
}
// once no joins reference the pk table, forget it so it can be re-set
if ((_joins == null || _joins.isEmpty())
&& (_constsPK == null || _constsPK.isEmpty()))
_pkTable = null;
// a cached positive auto-assign answer may be stale after removal
if (remd && Boolean.TRUE.equals(_autoAssign))
_autoAssign = null;
return remd;
}
/**
* Returns the table this foreign key is linking to, if it is known yet.
* Set as a side effect of {@link #join} / {@link #joinConstant}.
*/
public Table getPrimaryKeyTable() {
return _pkTable;
}
/**
 * Increment the reference count of every column in this key: both the
 * joined local columns and the constant-joined local columns.
 */
public void refColumns() {
    for (Column joined : getColumns())
        joined.ref();
    for (Column constant : getConstantColumns())
        constant.ref();
}
/**
 * Decrement the reference count of every column in this key: both the
 * joined local columns and the constant-joined local columns.
 */
public void derefColumns() {
    for (Column joined : getColumns())
        joined.deref();
    for (Column constant : getConstantColumns())
        constant.deref();
}
/**
* Foreign keys are equal if the satisfy the equality constraints of
* {@link Constraint} and they have the same local and primary key
* columns and action.
* NOTE(review): only the delete action and deferred flag are compared;
* the update action is not -- confirm this is intentional.
*/
public boolean equalsForeignKey(ForeignKey fk) {
if (fk == this)
return true;
if (fk == null)
return false;
if (getDeleteAction() != fk.getDeleteAction())
return false;
if (isDeferred() != fk.isDeferred())
return false;
if (!columnsMatch(fk.getColumns(), fk.getPrimaryKeyColumns()))
return false;
if (!match(getConstantColumns(), fk.getConstantColumns()))
return false;
if (!match(getConstants(), fk.getConstants()))
return false;
if (!match(getConstantPrimaryKeyColumns(),
fk.getConstantPrimaryKeyColumns()))
return false;
if (!match(getPrimaryKeyConstants(), fk.getPrimaryKeyConstants()))
return false;
return true;
}
/**
* Return true if the given local and foreign columns match those
* on this key. This can be used to find foreign keys given only
* column linking information.
*/
public boolean columnsMatch(Column[] fkCols, Column[] fkPKCols) {
// order-insensitive comparison by qualified column path (see match)
return match(getColumns(), fkCols)
&& match(getPrimaryKeyColumns(), fkPKCols);
}
/**
 * Checks for non-nullable local columns.
 *
 * @return true if at least one local column is declared NOT NULL
 */
public boolean hasNotNullColumns() {
    for (Column local : getColumns()) {
        if (local.isNotNull())
            return true;
    }
    return false;
}
/**
 * Order-insensitive comparison of two column arrays by qualified path.
 */
private static boolean match(Column[] cols, Column[] fkCols) {
    if (cols.length != fkCols.length)
        return false;
    for (int i = 0; i < fkCols.length; i++) {
        if (!hasColumn(cols, fkCols[i]))
            return false;
    }
    return true;
}
/**
 * Return whether any column in cols has the same qualified path as col.
 */
private static boolean hasColumn(Column[] cols, Column col) {
    for (int i = 0; i < cols.length; i++) {
        if (cols[i].getQualifiedPath().equals(col.getQualifiedPath()))
            return true;
    }
    return false;
}
/**
 * Positional, null-safe comparison of two value arrays.
 */
private static boolean match(Object[] vals, Object[] fkVals) {
    if (vals.length != fkVals.length)
        return false;
    int i = 0;
    for (Object val : vals) {
        if (!Objects.equals(val, fkVals[i++]))
            return false;
    }
    return true;
}
/**
* Return the name of the foreignkey constraint as defined in the database.
*
* @deprecated use {@link #loadIdentifierFromDB} instead
*/
@Deprecated
public String loadNameFromDB(DBDictionary dbdict, Connection conn) {
return loadIdentifierFromDB(dbdict, conn).getName();
}
/**
* Look up this foreign key's database-assigned name by reflecting the
* imported keys of its table and matching them against this key's
* column joins. Returns DBIdentifier.NULL for logical keys, unattached
* keys, or when no matching database key is found; any reflection
* failure is logged at trace level and also yields NULL.
*/
public DBIdentifier loadIdentifierFromDB(DBDictionary dbdict, Connection conn) {
if( isLogical() || getTable() == null)
return DBIdentifier.NULL;
DBIdentifier retVal = DBIdentifier.NULL;
try{
Schema schema = getTable().getSchema();
ForeignKey[] fks = dbdict.getImportedKeys(conn.getMetaData(),
DBIdentifier.newCatalog(conn.getCatalog()), schema.getIdentifier(),
getTable().getIdentifier(), conn, false);
for (ForeignKey fk : fks) {
Table localtable = schema.getTable(fk.getTableIdentifier());
Table pkTable = schema.getTable(
fk.getPrimaryKeyTableIdentifier());
// build (or reuse) a comparable in-memory key for the reflected fk;
// if we add it here we must remove it again below
boolean addFK = false;
ForeignKey fkTemp = localtable.getForeignKey(
fk.getIdentifier());
if (fkTemp == null) {
addFK = true;
fkTemp = localtable.addForeignKey(
fk.getIdentifier());
fkTemp.setDeferred(fk.isDeferred());
fkTemp.setDeleteAction(fk.getDeleteAction());
}
if (fk.getColumns() == null || fk.getColumns().length == 0) {
// Singular column foreign key
if (!fkTemp.containsColumn(
localtable.getColumn(fk.getColumnIdentifier())))
fkTemp.join(localtable.getColumn(fk.getColumnIdentifier()),
pkTable.getColumn(fk.getPrimaryKeyColumnIdentifier()));
}
else {
// Add the multi-column foreign key, joining local and pk columns in
// the temporary key
Column[] locCols = fk.getColumns();
Column[] pkCols = fk.getPrimaryKeyColumns();
// Column counts must match
if (locCols != null && pkCols != null &&
locCols.length != pkCols.length) {
Log log = dbdict.getLog();
if (log.isTraceEnabled()) {
log.trace(_loc.get("fk-column-mismatch"));
}
}
for (int j = 0; j < locCols.length; j++) {
if (!fkTemp.containsColumn(
localtable.getColumn(locCols[j].getIdentifier()))) {
fkTemp.join(localtable.getColumn(locCols[j].getIdentifier()),
pkTable.getColumn(pkCols[j].getIdentifier()));
}
}
}
if (equalsForeignKey(fkTemp)) {
// found the matching database key; clean up and report its name
if (addFK)
localtable.removeForeignKey(fkTemp);
retVal = fk.getIdentifier();
break;
}
if (addFK)
localtable.removeForeignKey(fkTemp);
}
} catch(Exception ex){
// best-effort lookup: reflection failures are only traced
Log log = dbdict.getLog();
if (log.isTraceEnabled()) {
log.trace(_loc.get("except-read-fk-name"), ex);
}
}
return retVal;
}
/**
* Joins the column of a single column FK to this FK.
* @param fk
*/
public void addColumn(ForeignKey fk) {
// Convert simple name based fk to a multi-column FK if necessary.
if (getColumns() == null || getColumns().length == 0) {
// If this FK is single column key, covert to a multi-column key
Column[] keyCols = createKeyColumns(this);
if (keyCols[0] != null && keyCols[1] != null) {
setPrimaryKeyColumnIdentifier(DBIdentifier.NULL);
setColumnIdentifier(DBIdentifier.NULL);
join(keyCols[0], keyCols[1]);
}
}
// Create the local and primary key columns from the fk and add them
// to this fk.
Column[] keyCols = createKeyColumns(fk);
if (keyCols[0] != null && keyCols[1] != null) {
join(keyCols[0], keyCols[1]);
}
}
/*
* Creates the local and primary key columns for a name-based fk.
* @return Column[] element 0 is local column
* element 1 is the primary key in another table.
*/
private static Column[] createKeyColumns(ForeignKey fk) {
Column fkCol = null;
if (!DBIdentifier.isEmpty(fk.getColumnIdentifier())) {
fkCol = new Column();
fkCol.setIdentifier(fk.getColumnIdentifier());
fkCol.setTableIdentifier(fk.getTableIdentifier());
fkCol.setSchemaIdentifier(fk.getSchemaIdentifier());
}
Column pkCol = null;
if (!DBIdentifier.isEmpty(fk.getPrimaryKeyColumnIdentifier())) {
pkCol = new Column();
pkCol.setIdentifier(fk.getPrimaryKeyColumnIdentifier());
pkCol.setTableIdentifier(fk.getPrimaryKeyTableIdentifier());
pkCol.setSchemaIdentifier(fk.getPrimaryKeySchemaIdentifier());
}
return new Column[] { fkCol, pkCol };
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
if (!super.equals(o)) return false;
ForeignKey that = (ForeignKey) o;
if (_seq != that._seq) return false;
if (_delAction != that._delAction) return false;
if (_upAction != that._upAction) return false;
if (_index != that._index) return false;
if (_pkTableName != null ? !_pkTableName.equals(that._pkTableName) : that._pkTableName != null) return false;
if (_pkSchemaName != null ? !_pkSchemaName.equals(that._pkSchemaName) : that._pkSchemaName != null) return false;
if (_pkColumnName != null ? !_pkColumnName.equals(that._pkColumnName) : that._pkColumnName != null) return false;
if (_joins != null ? !_joins.equals(that._joins) : that._joins != null) return false;
if (_joinsPK != null ? !_joinsPK.equals(that._joinsPK) : that._joinsPK != null) return false;
if (_consts != null ? !_consts.equals(that._consts) : that._consts != null) return false;
if (_constsPK != null ? !_constsPK.equals(that._constsPK) : that._constsPK != null) return false;
// Probably incorrect - comparing Object[] arrays with Arrays.equals
if (!Arrays.equals(_locals, that._locals)) return false;
// Probably incorrect - comparing Object[] arrays with Arrays.equals
if (!Arrays.equals(_pks, that._pks)) return false;
// Probably incorrect - comparing Object[] arrays with Arrays.equals
if (!Arrays.equals(_constVals, that._constVals)) return false;
// Probably incorrect - comparing Object[] arrays with Arrays.equals
if (!Arrays.equals(_constCols, that._constCols)) return false;
// Probably incorrect - comparing Object[] arrays with Arrays.equals
if (!Arrays.equals(_constValsPK, that._constValsPK)) return false;
// Probably incorrect - comparing Object[] arrays with Arrays.equals
if (!Arrays.equals(_constColsPK, that._constColsPK)) return false;
if (_pkTable != null ? !_pkTable.equals(that._pkTable) : that._pkTable != null) return false;
return _autoAssign != null ? _autoAssign.equals(that._autoAssign) : that._autoAssign == null;
}
@Override
public int hashCode() {
int result = super.hashCode();
result = 31 * result + (_pkTableName != null ? _pkTableName.hashCode() : 0);
result = 31 * result + (_pkSchemaName != null ? _pkSchemaName.hashCode() : 0);
result = 31 * result + (_pkColumnName != null ? _pkColumnName.hashCode() : 0);
result = 31 * result + _seq;
result = 31 * result + (_joins != null ? _joins.hashCode() : 0);
result = 31 * result + (_joinsPK != null ? _joinsPK.hashCode() : 0);
result = 31 * result + (_consts != null ? _consts.hashCode() : 0);
result = 31 * result + (_constsPK != null ? _constsPK.hashCode() : 0);
result = 31 * result + _delAction;
result = 31 * result + _upAction;
result = 31 * result + _index;
result = 31 * result + Arrays.hashCode(_locals);
result = 31 * result + Arrays.hashCode(_pks);
result = 31 * result + Arrays.hashCode(_constVals);
result = 31 * result + Arrays.hashCode(_constCols);
result = 31 * result + Arrays.hashCode(_constValsPK);
result = 31 * result + Arrays.hashCode(_constColsPK);
result = 31 * result + (_pkTable != null ? _pkTable.hashCode() : 0);
result = 31 * result + (_autoAssign != null ? _autoAssign.hashCode() : 0);
return result;
}
/*
* ForeignKey utility class which determines equality based upon the
* non-column state of the keys.
*/
public static class FKMapKey {
private ForeignKey _fk;
public FKMapKey(ForeignKey fk) {
_fk = fk;
}
public ForeignKey getFk() {
return _fk;
}
@Override
public int hashCode() {
return getFk().getIdentifier() != null ? getFk().getIdentifier().hashCode() : getFk().hashCode();
}
@Override
public boolean equals(Object fkObj) {
if (fkObj == this) {
return true;
}
if (fkObj == null || !(fkObj instanceof FKMapKey)) {
return false;
}
ForeignKey fk = ((FKMapKey)fkObj).getFk();
if (getFk().getDeleteAction() != fk.getDeleteAction())
return false;
if (getFk().isDeferred() != fk.isDeferred())
return false;
if (!getFk().getIdentifier().equals(fk.getIdentifier())) {
return false;
}
// Assert PK table name and schema
if (!getFk().getPrimaryKeySchemaIdentifier().equals(fk.getPrimaryKeySchemaIdentifier()) ||
!getFk().getPrimaryKeyTableIdentifier().equals(fk.getPrimaryKeyTableIdentifier()) ||
!getFk().getSchemaIdentifier().equals(fk.getSchemaIdentifier()) ||
!getFk().getTableIdentifier().equals(fk.getTableIdentifier())) {
return false;
}
return true;
}
}
}
|
googleapis/google-cloud-java | 34,879 | java-dialogflow-cx/proto-google-cloud-dialogflow-cx-v3beta1/src/main/java/com/google/cloud/dialogflow/cx/v3beta1/TestError.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/cx/v3beta1/test_case.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.cx.v3beta1;
/**
*
*
* <pre>
* Error info for running a test.
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.cx.v3beta1.TestError}
*/
public final class TestError extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.cx.v3beta1.TestError)
TestErrorOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use TestError.newBuilder() to construct.
  private TestError(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private TestError() {
    testCase_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new TestError();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.dialogflow.cx.v3beta1.TestCaseProto
        .internal_static_google_cloud_dialogflow_cx_v3beta1_TestError_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.dialogflow.cx.v3beta1.TestCaseProto
        .internal_static_google_cloud_dialogflow_cx_v3beta1_TestError_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.dialogflow.cx.v3beta1.TestError.class,
            com.google.cloud.dialogflow.cx.v3beta1.TestError.Builder.class);
  }

  // Presence bits for the message fields (bit 0 = status, bit 1 = test_time).
  private int bitField0_;
  public static final int TEST_CASE_FIELD_NUMBER = 1;

  @SuppressWarnings("serial")
  private volatile java.lang.Object testCase_ = "";
  /**
   *
   *
   * <pre>
   * The test case resource name.
   * </pre>
   *
   * <code>string test_case = 1 [(.google.api.resource_reference) = { ... }</code>
   *
   * @return The testCase.
   */
  @java.lang.Override
  public java.lang.String getTestCase() {
    java.lang.Object ref = testCase_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Field holds a ByteString until first read; decode and cache the
      // String in the volatile field for subsequent calls.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      testCase_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * The test case resource name.
   * </pre>
   *
   * <code>string test_case = 1 [(.google.api.resource_reference) = { ... }</code>
   *
   * @return The bytes for testCase.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getTestCaseBytes() {
    java.lang.Object ref = testCase_;
    if (ref instanceof java.lang.String) {
      // Mirror of getTestCase(): encode once and cache the ByteString.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      testCase_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int STATUS_FIELD_NUMBER = 2;
  private com.google.rpc.Status status_;
  /**
   *
   *
   * <pre>
   * The status associated with the test.
   * </pre>
   *
   * <code>.google.rpc.Status status = 2;</code>
   *
   * @return Whether the status field is set.
   */
  @java.lang.Override
  public boolean hasStatus() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * The status associated with the test.
   * </pre>
   *
   * <code>.google.rpc.Status status = 2;</code>
   *
   * @return The status.
   */
  @java.lang.Override
  public com.google.rpc.Status getStatus() {
    return status_ == null ? com.google.rpc.Status.getDefaultInstance() : status_;
  }
  /**
   *
   *
   * <pre>
   * The status associated with the test.
   * </pre>
   *
   * <code>.google.rpc.Status status = 2;</code>
   */
  @java.lang.Override
  public com.google.rpc.StatusOrBuilder getStatusOrBuilder() {
    return status_ == null ? com.google.rpc.Status.getDefaultInstance() : status_;
  }

  public static final int TEST_TIME_FIELD_NUMBER = 3;
  private com.google.protobuf.Timestamp testTime_;
  /**
   *
   *
   * <pre>
   * The timestamp when the test was completed.
   * </pre>
   *
   * <code>.google.protobuf.Timestamp test_time = 3;</code>
   *
   * @return Whether the testTime field is set.
   */
  @java.lang.Override
  public boolean hasTestTime() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   *
   *
   * <pre>
   * The timestamp when the test was completed.
   * </pre>
   *
   * <code>.google.protobuf.Timestamp test_time = 3;</code>
   *
   * @return The testTime.
   */
  @java.lang.Override
  public com.google.protobuf.Timestamp getTestTime() {
    return testTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : testTime_;
  }
  /**
   *
   *
   * <pre>
   * The timestamp when the test was completed.
   * </pre>
   *
   * <code>.google.protobuf.Timestamp test_time = 3;</code>
   */
  @java.lang.Override
  public com.google.protobuf.TimestampOrBuilder getTestTimeOrBuilder() {
    return testTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : testTime_;
  }

  // -1 = not computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Fields are written in field-number order; unset fields are skipped.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(testCase_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, testCase_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(2, getStatus());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(3, getTestTime());
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(testCase_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, testCase_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getStatus());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getTestTime());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.dialogflow.cx.v3beta1.TestError)) {
      return super.equals(obj);
    }
    com.google.cloud.dialogflow.cx.v3beta1.TestError other =
        (com.google.cloud.dialogflow.cx.v3beta1.TestError) obj;
    if (!getTestCase().equals(other.getTestCase())) return false;
    if (hasStatus() != other.hasStatus()) return false;
    if (hasStatus()) {
      if (!getStatus().equals(other.getStatus())) return false;
    }
    if (hasTestTime() != other.hasTestTime()) return false;
    if (hasTestTime()) {
      if (!getTestTime().equals(other.getTestTime())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    // Hash is memoized; messages are effectively immutable once built.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + TEST_CASE_FIELD_NUMBER;
    hash = (53 * hash) + getTestCase().hashCode();
    if (hasStatus()) {
      hash = (37 * hash) + STATUS_FIELD_NUMBER;
      hash = (53 * hash) + getStatus().hashCode();
    }
    if (hasTestTime()) {
      hash = (37 * hash) + TEST_TIME_FIELD_NUMBER;
      hash = (53 * hash) + getTestTime().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // Standard generated parse entry points delegating to PARSER.
  public static com.google.cloud.dialogflow.cx.v3beta1.TestError parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.TestError parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.TestError parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.TestError parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.TestError parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.TestError parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.TestError parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.TestError parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.TestError parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.TestError parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.TestError parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.TestError parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.cloud.dialogflow.cx.v3beta1.TestError prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Error info for running a test.
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.cx.v3beta1.TestError}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.cx.v3beta1.TestError)
com.google.cloud.dialogflow.cx.v3beta1.TestErrorOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.dialogflow.cx.v3beta1.TestCaseProto
          .internal_static_google_cloud_dialogflow_cx_v3beta1_TestError_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.dialogflow.cx.v3beta1.TestCaseProto
          .internal_static_google_cloud_dialogflow_cx_v3beta1_TestError_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.dialogflow.cx.v3beta1.TestError.class,
              com.google.cloud.dialogflow.cx.v3beta1.TestError.Builder.class);
    }

    // Construct using com.google.cloud.dialogflow.cx.v3beta1.TestError.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      // Eagerly create nested builders only when the runtime requires it.
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getStatusFieldBuilder();
        getTestTimeFieldBuilder();
      }
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      testCase_ = "";
      status_ = null;
      if (statusBuilder_ != null) {
        statusBuilder_.dispose();
        statusBuilder_ = null;
      }
      testTime_ = null;
      if (testTimeBuilder_ != null) {
        testTimeBuilder_.dispose();
        testTimeBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.dialogflow.cx.v3beta1.TestCaseProto
          .internal_static_google_cloud_dialogflow_cx_v3beta1_TestError_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.dialogflow.cx.v3beta1.TestError getDefaultInstanceForType() {
      return com.google.cloud.dialogflow.cx.v3beta1.TestError.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.dialogflow.cx.v3beta1.TestError build() {
      com.google.cloud.dialogflow.cx.v3beta1.TestError result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.dialogflow.cx.v3beta1.TestError buildPartial() {
      com.google.cloud.dialogflow.cx.v3beta1.TestError result =
          new com.google.cloud.dialogflow.cx.v3beta1.TestError(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    private void buildPartial0(com.google.cloud.dialogflow.cx.v3beta1.TestError result) {
      // Copy set fields from the builder and translate the builder's
      // presence bits into the message's presence bits.
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.testCase_ = testCase_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.status_ = statusBuilder_ == null ? status_ : statusBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.testTime_ = testTimeBuilder_ == null ? testTime_ : testTimeBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.dialogflow.cx.v3beta1.TestError) {
        return mergeFrom((com.google.cloud.dialogflow.cx.v3beta1.TestError) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.dialogflow.cx.v3beta1.TestError other) {
      if (other == com.google.cloud.dialogflow.cx.v3beta1.TestError.getDefaultInstance())
        return this;
      if (!other.getTestCase().isEmpty()) {
        testCase_ = other.testCase_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.hasStatus()) {
        mergeStatus(other.getStatus());
      }
      if (other.hasTestTime()) {
        mergeTestTime(other.getTestTime());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          // Dispatch on the wire tag (field number << 3 | wire type).
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                testCase_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getStatusFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                input.readMessage(getTestTimeFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.lang.Object testCase_ = "";
/**
*
*
* <pre>
* The test case resource name.
* </pre>
*
* <code>string test_case = 1 [(.google.api.resource_reference) = { ... }</code>
*
* @return The testCase.
*/
public java.lang.String getTestCase() {
java.lang.Object ref = testCase_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
testCase_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The test case resource name.
* </pre>
*
* <code>string test_case = 1 [(.google.api.resource_reference) = { ... }</code>
*
* @return The bytes for testCase.
*/
public com.google.protobuf.ByteString getTestCaseBytes() {
java.lang.Object ref = testCase_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
testCase_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The test case resource name.
* </pre>
*
* <code>string test_case = 1 [(.google.api.resource_reference) = { ... }</code>
*
* @param value The testCase to set.
* @return This builder for chaining.
*/
public Builder setTestCase(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
testCase_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The test case resource name.
* </pre>
*
* <code>string test_case = 1 [(.google.api.resource_reference) = { ... }</code>
*
* @return This builder for chaining.
*/
public Builder clearTestCase() {
testCase_ = getDefaultInstance().getTestCase();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* The test case resource name.
* </pre>
*
* <code>string test_case = 1 [(.google.api.resource_reference) = { ... }</code>
*
* @param value The bytes for testCase to set.
* @return This builder for chaining.
*/
public Builder setTestCaseBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
testCase_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.rpc.Status status_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>
statusBuilder_;
/**
*
*
* <pre>
* The status associated with the test.
* </pre>
*
* <code>.google.rpc.Status status = 2;</code>
*
* @return Whether the status field is set.
*/
public boolean hasStatus() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* The status associated with the test.
* </pre>
*
* <code>.google.rpc.Status status = 2;</code>
*
* @return The status.
*/
public com.google.rpc.Status getStatus() {
if (statusBuilder_ == null) {
return status_ == null ? com.google.rpc.Status.getDefaultInstance() : status_;
} else {
return statusBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* The status associated with the test.
* </pre>
*
* <code>.google.rpc.Status status = 2;</code>
*/
public Builder setStatus(com.google.rpc.Status value) {
if (statusBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
status_ = value;
} else {
statusBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The status associated with the test.
* </pre>
*
* <code>.google.rpc.Status status = 2;</code>
*/
public Builder setStatus(com.google.rpc.Status.Builder builderForValue) {
if (statusBuilder_ == null) {
status_ = builderForValue.build();
} else {
statusBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The status associated with the test.
* </pre>
*
* <code>.google.rpc.Status status = 2;</code>
*/
public Builder mergeStatus(com.google.rpc.Status value) {
if (statusBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& status_ != null
&& status_ != com.google.rpc.Status.getDefaultInstance()) {
getStatusBuilder().mergeFrom(value);
} else {
status_ = value;
}
} else {
statusBuilder_.mergeFrom(value);
}
if (status_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* The status associated with the test.
* </pre>
*
* <code>.google.rpc.Status status = 2;</code>
*/
public Builder clearStatus() {
bitField0_ = (bitField0_ & ~0x00000002);
status_ = null;
if (statusBuilder_ != null) {
statusBuilder_.dispose();
statusBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* The status associated with the test.
* </pre>
*
* <code>.google.rpc.Status status = 2;</code>
*/
public com.google.rpc.Status.Builder getStatusBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getStatusFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* The status associated with the test.
* </pre>
*
* <code>.google.rpc.Status status = 2;</code>
*/
public com.google.rpc.StatusOrBuilder getStatusOrBuilder() {
if (statusBuilder_ != null) {
return statusBuilder_.getMessageOrBuilder();
} else {
return status_ == null ? com.google.rpc.Status.getDefaultInstance() : status_;
}
}
/**
*
*
* <pre>
* The status associated with the test.
* </pre>
*
* <code>.google.rpc.Status status = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>
getStatusFieldBuilder() {
if (statusBuilder_ == null) {
statusBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.rpc.Status,
com.google.rpc.Status.Builder,
com.google.rpc.StatusOrBuilder>(getStatus(), getParentForChildren(), isClean());
status_ = null;
}
return statusBuilder_;
}
private com.google.protobuf.Timestamp testTime_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>
testTimeBuilder_;
/**
*
*
* <pre>
* The timestamp when the test was completed.
* </pre>
*
* <code>.google.protobuf.Timestamp test_time = 3;</code>
*
* @return Whether the testTime field is set.
*/
public boolean hasTestTime() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
*
* <pre>
* The timestamp when the test was completed.
* </pre>
*
* <code>.google.protobuf.Timestamp test_time = 3;</code>
*
* @return The testTime.
*/
public com.google.protobuf.Timestamp getTestTime() {
if (testTimeBuilder_ == null) {
return testTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : testTime_;
} else {
return testTimeBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* The timestamp when the test was completed.
* </pre>
*
* <code>.google.protobuf.Timestamp test_time = 3;</code>
*/
public Builder setTestTime(com.google.protobuf.Timestamp value) {
if (testTimeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
testTime_ = value;
} else {
// Builder active: route the value through it instead of the plain field.
testTimeBuilder_.setMessage(value);
}
// Mark the field as explicitly set and notify the parent of the change.
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* The timestamp when the test was completed.
* </pre>
*
* <code>.google.protobuf.Timestamp test_time = 3;</code>
*/
public Builder setTestTime(com.google.protobuf.Timestamp.Builder builderForValue) {
if (testTimeBuilder_ == null) {
// Snapshot the supplied builder into an immutable message.
testTime_ = builderForValue.build();
} else {
testTimeBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* The timestamp when the test was completed.
* </pre>
*
* <code>.google.protobuf.Timestamp test_time = 3;</code>
*/
public Builder mergeTestTime(com.google.protobuf.Timestamp value) {
if (testTimeBuilder_ == null) {
// Merge only when a non-default value is already present; otherwise replace.
if (((bitField0_ & 0x00000004) != 0)
&& testTime_ != null
&& testTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) {
getTestTimeBuilder().mergeFrom(value);
} else {
testTime_ = value;
}
} else {
testTimeBuilder_.mergeFrom(value);
}
// Note: getTestTimeBuilder() above nulls testTime_, so this guard only fires
// on the replace path (generated-code idiom; do not "simplify").
if (testTime_ != null) {
bitField0_ |= 0x00000004;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* The timestamp when the test was completed.
* </pre>
*
* <code>.google.protobuf.Timestamp test_time = 3;</code>
*/
public Builder clearTestTime() {
// Clear the presence bit, drop the value and release any nested builder.
bitField0_ = (bitField0_ & ~0x00000004);
testTime_ = null;
if (testTimeBuilder_ != null) {
testTimeBuilder_.dispose();
testTimeBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* The timestamp when the test was completed.
* </pre>
*
* <code>.google.protobuf.Timestamp test_time = 3;</code>
*/
public com.google.protobuf.Timestamp.Builder getTestTimeBuilder() {
// Requesting a mutable builder implicitly marks the field as set.
bitField0_ |= 0x00000004;
onChanged();
return getTestTimeFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* The timestamp when the test was completed.
* </pre>
*
* <code>.google.protobuf.Timestamp test_time = 3;</code>
*/
public com.google.protobuf.TimestampOrBuilder getTestTimeOrBuilder() {
if (testTimeBuilder_ != null) {
return testTimeBuilder_.getMessageOrBuilder();
} else {
return testTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : testTime_;
}
}
/**
*
*
* <pre>
* The timestamp when the test was completed.
* </pre>
*
* <code>.google.protobuf.Timestamp test_time = 3;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>
getTestTimeFieldBuilder() {
if (testTimeBuilder_ == null) {
// Seed the builder with the current value, then hand ownership to it:
// from here on testTime_ stays null and all access goes via the builder.
testTimeBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>(
getTestTime(), getParentForChildren(), isClean());
testTime_ = null;
}
return testTimeBuilder_;
}
// Generated pass-through overrides required by the protobuf Builder contract.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.cx.v3beta1.TestError)
}
// @@protoc_insertion_point(class_scope:google.cloud.dialogflow.cx.v3beta1.TestError)
// Shared immutable default instance of TestError, created once at class load.
private static final com.google.cloud.dialogflow.cx.v3beta1.TestError DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.dialogflow.cx.v3beta1.TestError();
}
public static com.google.cloud.dialogflow.cx.v3beta1.TestError getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Parser that builds a TestError from a serialized stream; on failure the
// partially-parsed message is attached to the thrown exception.
private static final com.google.protobuf.Parser<TestError> PARSER =
new com.google.protobuf.AbstractParser<TestError>() {
@java.lang.Override
public TestError parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
// Wrap plain I/O failures in the protobuf exception type expected by callers.
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<TestError> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<TestError> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.TestError getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// ==== file boundary (dataset-concatenation artifact) ====
// next file: commons-rng-sampling/src/main/java/org/apache/commons/rng/sampling/distribution/FastLoadedDiceRollerDiscreteSampler.java
// (repo: apache/commons-rng, size: 35,199 bytes)
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.rng.sampling.distribution;
import java.math.BigInteger;
import java.util.Arrays;
import org.apache.commons.rng.UniformRandomProvider;
/**
* Distribution sampler that uses the Fast Loaded Dice Roller (FLDR). It can be used to
* sample from {@code n} values each with an associated relative weight. If all unique items
* are assigned the same weight it is more efficient to use the {@link DiscreteUniformSampler}.
*
* <p>Given a list {@code L} of {@code n} positive numbers,
* where {@code L[i]} represents the relative weight of the {@code i}th side, FLDR returns
* integer {@code i} with relative probability {@code L[i]}.
*
* <p>FLDR produces <em>exact</em> samples from the specified probability distribution.
* <ul>
* <li>For integer weights, the probability of returning {@code i} is precisely equal to the
* rational number {@code L[i] / m}, where {@code m} is the sum of {@code L}.
* <li>For floating-points weights, each weight {@code L[i]} is converted to the
* corresponding rational number {@code p[i] / q[i]} where {@code p[i]} is a positive integer and
* {@code q[i]} is a power of 2. The rational weights are then normalized (exactly) to sum to unity.
* </ul>
*
* <p>Note that if <em>exact</em> samples are not required then an alternative sampler that
* ignores very small relative weights may have improved sampling performance.
*
* <p>This implementation is based on the algorithm in:
*
* <blockquote>
* Feras A. Saad, Cameron E. Freer, Martin C. Rinard, and Vikash K. Mansinghka.
* The Fast Loaded Dice Roller: A Near-Optimal Exact Sampler for Discrete Probability
* Distributions. In AISTATS 2020: Proceedings of the 23rd International Conference on
* Artificial Intelligence and Statistics, Proceedings of Machine Learning Research 108,
* Palermo, Sicily, Italy, 2020.
* </blockquote>
*
* <p>Sampling uses {@link UniformRandomProvider#nextInt()} as the source of random bits.
*
* @see <a href="https://arxiv.org/abs/2003.03830">Saad et al (2020)
* Proceedings of the 23rd International Conference on Artificial Intelligence and Statistics,
* PMLR 108:1036-1046.</a>
* @since 1.5
*/
public abstract class FastLoadedDiceRollerDiscreteSampler
implements SharedStateDiscreteSampler {
/**
* The maximum size of an array.
*
* <p>This value is taken from the limit in Open JDK 8 {@code java.util.ArrayList}.
* It allows VMs to reserve some header words in an array.
*/
private static final int MAX_ARRAY_SIZE = Integer.MAX_VALUE - 8;
/** The maximum biased exponent for a finite double.
* This is offset by 1023 from {@code Math.getExponent(Double.MAX_VALUE)}. */
private static final int MAX_BIASED_EXPONENT = 2046;
/** Size of the mantissa of a double. Equal to 52 bits. */
private static final int MANTISSA_SIZE = 52;
/** Mask to extract the 52-bit mantissa from a long representation of a double. */
private static final long MANTISSA_MASK = 0x000f_ffff_ffff_ffffL;
/** BigInteger representation of {@link Long#MAX_VALUE}. */
private static final BigInteger MAX_LONG = BigInteger.valueOf(Long.MAX_VALUE);
/** The maximum offset that will avoid loss of bits for a left shift of a 53-bit value.
* The value will remain positive for any shift {@code <=} this value.
* (53 significand bits + 10 = 63 bits, still within the positive long range.) */
private static final int MAX_OFFSET = 10;
/** Initial value for no leaf node label. Marks unused entries of the DDG tree table
* (see {@code Arrays.fill(lH, NO_LABEL)} in the createSampler methods). */
private static final int NO_LABEL = Integer.MAX_VALUE;
/** Name of the sampler. */
private static final String SAMPLER_NAME = "Fast Loaded Dice Roller";
/**
 * Degenerate sampler used when exactly one category has a non-zero weight:
 * every call to {@link #sample()} returns the same index and no randomness
 * is consumed.
 */
private static final class FixedValueDiscreteSampler extends FastLoadedDiceRollerDiscreteSampler {
    /** Index returned by every sample. */
    private final int result;

    /**
     * @param sampleValue Sample value returned by every call to {@link #sample()}.
     */
    FixedValueDiscreteSampler(int sampleValue) {
        result = sampleValue;
    }

    @Override
    public int sample() {
        return result;
    }

    @Override
    public FastLoadedDiceRollerDiscreteSampler withUniformRandomProvider(UniformRandomProvider rng) {
        // Stateless with respect to the RNG, so the instance can be shared as-is.
        return this;
    }

    @Override
    public String toString() {
        return SAMPLER_NAME;
    }
}
/**
* Class to implement the FLDR sample algorithm.
* Walks the pre-computed discrete distribution generating (DDG) tree one random
* bit at a time until a leaf with a valid category label is reached.
*/
private static final class FLDRSampler extends FastLoadedDiceRollerDiscreteSampler {
/** Empty boolean source. This is the location of the sign-bit after 31 right shifts on
* the boolean source. */
private static final int EMPTY_BOOL_SOURCE = 1;
/** Underlying source of randomness. */
private final UniformRandomProvider rng;
/** Number of categories. */
private final int n;
/** Number of levels in the discrete distribution generating (DDG) tree.
* Equal to {@code ceil(log2(m))} where {@code m} is the sum of observations. */
private final int k;
/** Number of leaf nodes at each level. */
private final int[] h;
/** Stores the leaf node labels in increasing order. Named {@code H} in the FLDR paper.
* Flattened 2D table: entry (d, j) is stored at {@code lH[d * k + j]}. */
private final int[] lH;
/**
* Provides a bit source for booleans.
*
* <p>A cached value from a call to {@link UniformRandomProvider#nextInt()}.
*
* <p>Only stores 31-bits when full as 1 bit has already been consumed.
* The sign bit is a flag that shifts down so the source eventually equals 1
* when all bits are consumed and will trigger a refill.
*/
private int booleanSource = EMPTY_BOOL_SOURCE;
/**
* Creates a sampler.
*
* <p>The input parameters are not validated and must be correctly computed tables.
*
* @param rng Generator of uniformly distributed random numbers.
* @param n Number of categories
* @param k Number of levels in the discrete distribution generating (DDG) tree.
* Equal to {@code ceil(log2(m))} where {@code m} is the sum of observations.
* @param h Number of leaf nodes at each level.
* @param lH Stores the leaf node labels in increasing order.
*/
FLDRSampler(UniformRandomProvider rng,
int n,
int k,
int[] h,
int[] lH) {
this.rng = rng;
this.n = n;
this.k = k;
// Deliberate direct storage of input arrays
this.h = h;
this.lH = lH;
}
/**
* Creates a copy with a new source of randomness.
* The immutable tree tables are shared with the source sampler.
*
* @param rng Generator of uniformly distributed random numbers.
* @param source Source to copy.
*/
private FLDRSampler(UniformRandomProvider rng,
FLDRSampler source) {
this.rng = rng;
this.n = source.n;
this.k = source.k;
this.h = source.h;
this.lH = source.lH;
}
/** {@inheritDoc} */
@Override
public int sample() {
// ALGORITHM 5: SAMPLE
// c = current level in the DDG tree, d = node index within that level.
int c = 0;
int d = 0;
for (;;) {
// b = flip()
// d = 2 * d + (1 - b)
d = (d << 1) + flip();
if (d < h[c]) {
// Reached a leaf node at level c.
// z = H[d][c]
final int z = lH[d * k + c];
// assert z != NO_LABEL
if (z < n) {
return z;
}
// z == n is the padding label a(n+1): reject and restart from the root.
d = 0;
c = 0;
} else {
// Internal node: skip past the leaves at this level and descend.
d = d - h[c];
c++;
}
}
}
/**
* Provides a source of boolean bits.
*
* <p>Note: This replicates the boolean cache functionality of
* {@code o.a.c.rng.core.source32.IntProvider}. The method has been simplified to return
* an {@code int} value rather than a {@code boolean}.
*
* @return the bit (0 or 1)
*/
private int flip() {
int bits = booleanSource;
if (bits == 1) {
// Refill
bits = rng.nextInt();
// Store a refill flag in the sign bit and the unused 31 bits, return lowest bit
booleanSource = Integer.MIN_VALUE | (bits >>> 1);
return bits & 0x1;
}
// Shift down eventually triggering refill, return current lowest bit
booleanSource = bits >>> 1;
return bits & 0x1;
}
/** {@inheritDoc} */
@Override
public String toString() {
return SAMPLER_NAME + " [" + rng.toString() + "]";
}
/** {@inheritDoc} */
@Override
public FastLoadedDiceRollerDiscreteSampler withUniformRandomProvider(UniformRandomProvider rng) {
return new FLDRSampler(rng, this);
}
}
/** Package-private constructor restricting subclassing to the samplers in this file. */
FastLoadedDiceRollerDiscreteSampler() {
// Intentionally empty
}
/** {@inheritDoc} */
// Redeclare the signature to return a FastLoadedDiceRollerDiscreteSampler,
// not the plain SharedStateDiscreteSampler declared by the interface.
@Override
public abstract FastLoadedDiceRollerDiscreteSampler withUniformRandomProvider(UniformRandomProvider rng);
/**
 * Creates a sampler from observed frequencies.
 *
 * <p>Note: The discrete distribution generating (DDG) tree requires {@code (n + 1) * k}
 * entries where {@code n} is the number of categories, {@code k == ceil(log2(m))} and
 * {@code m} is the sum of the observed frequencies. An exception is raised if this
 * cannot be allocated as a single array.
 *
 * <p>For reference the sum is limited to {@link Long#MAX_VALUE} and the value {@code k}
 * to 63. The number of categories is limited to approximately
 * {@code ((2^31 - 1) / k) = 34,087,042} when the sum of frequencies is large enough
 * to create k=63.
 *
 * @param rng Generator of uniformly distributed random numbers.
 * @param frequencies Observed frequencies of the discrete distribution.
 * @return the sampler
 * @throws IllegalArgumentException if {@code frequencies} is null or empty, a
 * frequency is negative, the sum of all frequencies is either zero or
 * above {@link Long#MAX_VALUE}, or the size of the discrete distribution generating tree
 * is too large.
 */
public static FastLoadedDiceRollerDiscreteSampler of(UniformRandomProvider rng,
                                                     long[] frequencies) {
    // Validates and sums in one pass (throws on null/empty, negatives, overflow, zero sum).
    final long total = sum(frequencies);
    final int[] nonZero = indicesOfNonZero(frequencies);
    // A single non-zero category (which also covers a single observation, where
    // log2(m) == 0 would break the DDG tree construction) yields a constant result.
    if (nonZero.length == 1) {
        return new FixedValueDiscreteSampler(indexOfNonZero(frequencies));
    }
    return createSampler(rng, frequencies, nonZero, total);
}
/**
 * Creates a sampler from floating-point weights.
 *
 * <p>Each weight is converted to a rational number {@code p / q} where {@code q} is a
 * power of 2. The numerators {@code p} are scaled to use a common denominator before
 * summing.
 *
 * <p>All weights are used to create the sampler. Weights with a small magnitude
 * relative to the largest weight can be excluded using the constructor method with the
 * relative magnitude parameter {@code alpha}
 * (see {@link #of(UniformRandomProvider, double[], int)}).
 *
 * @param rng Generator of uniformly distributed random numbers.
 * @param weights Weights of the discrete distribution.
 * @return the sampler
 * @throws IllegalArgumentException if {@code weights} is null or empty, a
 * weight is negative, infinite or {@code NaN}, the sum of all weights is zero, or the
 * size of the discrete distribution generating tree is too large.
 * @see #of(UniformRandomProvider, double[], int)
 */
public static FastLoadedDiceRollerDiscreteSampler of(UniformRandomProvider rng,
                                                     double[] weights) {
    // Delegate with alpha == 0: a non-positive alpha disables weight filtering.
    return of(rng, weights, 0);
}
/**
* Creates a sampler.
*
* <p>Weights are converted to rational numbers {@code p / q} where {@code q} is
* a power of 2. The numerators {@code p} are scaled to use a common
* denominator before summing.
*
* <p>Note: The discrete distribution generating (DDG) tree requires
* {@code (n + 1) * k} entries where {@code n} is the number of categories,
* {@code k == ceil(log2(m))} and {@code m} is the sum of the weight numerators
* {@code q}. An exception is raised if this cannot be allocated as a single
* array.
*
* <p>For reference the value {@code k} is equal to or greater than the ratio of
* the largest to the smallest weight expressed as a power of 2. For
* {@code Double.MAX_VALUE / Double.MIN_VALUE} this is ~2098. The value
* {@code k} increases with the sum of the weight numerators. A number of
* weights in excess of 1,000,000 with values equal to {@link Double#MAX_VALUE}
* would be required to raise an exception when the minimum weight is
* {@link Double#MIN_VALUE}.
*
* <p>Weights with a small magnitude relative to the largest weight can be
* excluded using the relative magnitude parameter {@code alpha}. This will set
* any weight to zero if the magnitude is approximately 2<sup>alpha</sup>
* <em>smaller</em> than the largest weight. This comparison is made using only
* the exponent of the input weights. The {@code alpha} parameter is ignored if
* not above zero. Note that a small {@code alpha} parameter will exclude more
* weights than a large {@code alpha} parameter.
*
* <p>The alpha parameter can be used to exclude categories that
* have a very low probability of occurrence and will improve the construction
* performance of the sampler. The effect on sampling performance depends on
* the relative weights of the excluded categories; typically a high {@code alpha}
* is used to exclude categories that would be visited with a very low probability
* and the sampling performance is unchanged.
*
* <p><b>Implementation Note</b>
*
* <p>This method creates a sampler with <em>exact</em> samples from the
* specified probability distribution. It is recommended to use this method:
* <ul>
* <li>if the weights are computed, for example from a probability mass function; or
* <li>if the weights sum to an infinite value.
* </ul>
*
* <p>If the weights are computed from empirical observations then it is
* recommended to use the factory method
* {@link #of(UniformRandomProvider, long[]) accepting frequencies}. This
* requires the total number of observations to be representable as a long
* integer.
*
* <p>Note that if all weights are scaled by a power of 2 to be integers, and
* each integer can be represented as a positive 64-bit long value, then the
* sampler created using this method will match the output from a sampler
* created with the scaled weights converted to long values for the factory
* method {@link #of(UniformRandomProvider, long[]) accepting frequencies}. This
* assumes the sum of the integer values does not overflow.
*
* <p>It should be noted that the conversion of weights to rational numbers has
* a performance overhead during construction (sampling performance is not
* affected). This may be avoided by first converting them to integer values
* that can be summed without overflow. For example by scaling values by
* {@code 2^62 / sum} and converting to long by casting or rounding.
*
* <p>This approach may increase the efficiency of construction. The resulting
* sampler may no longer produce <em>exact</em> samples from the distribution.
* In particular any weights with a converted frequency of zero cannot be
* sampled.
*
* @param rng Generator of uniformly distributed random numbers.
* @param weights Weights of the discrete distribution.
* @param alpha Alpha parameter.
* @return the sampler
* @throws IllegalArgumentException if {@code weights} is null or empty, a
* weight is negative, infinite or {@code NaN}, the sum of all weights is zero,
* or the size of the discrete distribution generating tree is too large.
* @see #of(UniformRandomProvider, long[])
*/
public static FastLoadedDiceRollerDiscreteSampler of(UniformRandomProvider rng,
double[] weights,
int alpha) {
final int n = checkWeightsNonZeroLength(weights);
// Convert floating-point double to a relative weight
// using a shifted integer representation:
// weight i == frequencies[i] * 2^offsets[i] (both arrays filled in place).
final long[] frequencies = new long[n];
final int[] offsets = new int[n];
convertToIntegers(weights, frequencies, offsets, alpha);
// Obtain indices of non-zero weights
final int[] indices = indicesOfNonZero(frequencies);
// Edge case for 1 non-zero weight.
if (indices.length == 1) {
return new FixedValueDiscreteSampler(indexOfNonZero(frequencies));
}
final BigInteger m = sum(frequencies, offsets, indices);
// Use long arithmetic if possible. This occurs when the weights are similar in magnitude.
if (m.compareTo(MAX_LONG) <= 0) {
// Apply the offset so each frequency becomes a plain long value.
for (int i = 0; i < n; i++) {
frequencies[i] <<= offsets[i];
}
return createSampler(rng, frequencies, indices, m.longValue());
}
// Sum exceeds a long: keep the (value, offset) representation and use BigInteger.
return createSampler(rng, frequencies, offsets, indices, m);
}
/**
 * Sums the frequencies, validating the input.
 *
 * @param frequencies Frequencies.
 * @return the sum
 * @throws IllegalArgumentException if {@code frequencies} is null or empty, a
 * frequency is negative, or the sum of all frequencies is either zero or above
 * {@link Long#MAX_VALUE}
 */
private static long sum(long[] frequencies) {
    if (frequencies == null || frequencies.length == 0) {
        throw new IllegalArgumentException("frequencies must contain at least 1 value");
    }
    // Accumulate while OR-ing the sign bits of every term and every partial sum:
    // a negative input, or an overflow (the sum of two non-negative longs wraps
    // to a negative value), leaves a sign bit in the flag.
    long total = 0;
    long negativeFlag = 0;
    for (final long f : frequencies) {
        total += f;
        negativeFlag |= f | total;
    }
    if (negativeFlag < 0) {
        // Distinguish bad input from overflow for the error message.
        for (final long f : frequencies) {
            if (f < 0) {
                throw new IllegalArgumentException("frequencies must contain positive values: " + f);
            }
        }
        throw new IllegalArgumentException("Overflow when summing frequencies");
    }
    if (total == 0) {
        throw new IllegalArgumentException("Sum of frequencies is zero");
    }
    return total;
}
/**
* Convert the floating-point weights to relative weights represented as
* integers {@code value * 2^exponent}. The relative weight as an integer is:
*
* <pre>
* BigInteger.valueOf(value).shiftLeft(exponent)
* </pre>
*
* <p>Note that the weights are created using a common power-of-2 scaling
* operation so the minimum exponent is zero.
*
* <p>A positive {@code alpha} parameter is used to set any weight to zero if
* the magnitude is approximately 2<sup>alpha</sup> <em>smaller</em> than the
* largest weight. This comparison is made using only the exponent of the input
* weights.
*
* @param weights Weights of the discrete distribution.
* @param values Output floating-point mantissas converted to 53-bit integers.
* @param exponents Output power of 2 exponent.
* @param alpha Alpha parameter.
* @throws IllegalArgumentException if a weight is negative, infinite or
* {@code NaN}, or the sum of all weights is zero.
*/
private static void convertToIntegers(double[] weights, long[] values, int[] exponents, int alpha) {
int maxExponent = Integer.MIN_VALUE;
for (int i = 0; i < weights.length; i++) {
final double weight = weights[i];
// Ignore zero.
// When creating the integer value later using bit shifts the result will remain zero.
if (weight == 0) {
continue;
}
final long bits = Double.doubleToRawLongBits(weight);
// For the IEEE 754 format see Double.longBitsToDouble(long).
// Extract the exponent (with the sign bit)
int exp = (int) (bits >>> MANTISSA_SIZE);
// Detect negative, infinite or NaN.
// Note: Negative values sign bit will cause the exponent to be too high.
if (exp > MAX_BIASED_EXPONENT) {
throw new IllegalArgumentException("Invalid weight: " + weight);
}
long mantissa;
if (exp == 0) {
// Sub-normal number:
mantissa = (bits & MANTISSA_MASK) << 1;
// Here we convert to a normalised number by counting the leading zeros
// to obtain the number of shifts of the most significant bit in
// the mantissa that is required to get a 1 at position 53 (i.e. as
// if it were a normal number with assumed leading bit).
final int shift = Long.numberOfLeadingZeros(mantissa << 11);
mantissa <<= shift;
// exp may go negative here; scaleWeights() later re-bases all exponents
// so that the minimum is zero.
exp -= shift;
} else {
// Normal number. Add the implicit leading 1-bit.
mantissa = (bits & MANTISSA_MASK) | (1L << MANTISSA_SIZE);
}
// Here the floating-point number is equal to:
// mantissa * 2^(exp-1075)
values[i] = mantissa;
exponents[i] = exp;
maxExponent = Math.max(maxExponent, exp);
}
// No exponent indicates that all weights are zero
if (maxExponent == Integer.MIN_VALUE) {
throw new IllegalArgumentException("Sum of weights is zero");
}
// Order matters: filtering uses the raw exponents as magnitudes, so it must
// run before scaling rewrites them.
filterWeights(values, exponents, alpha, maxExponent);
scaleWeights(values, exponents);
}
/**
 * Filters small weights using the {@code alpha} parameter: any weight whose
 * magnitude is approximately 2<sup>alpha</sup> <em>smaller</em> than the largest
 * weight is set to zero. The comparison uses only the exponent of the input
 * weights.
 *
 * @param values 53-bit values.
 * @param exponents Power of 2 exponent.
 * @param alpha Alpha parameter.
 * @param maxExponent Maximum exponent.
 */
private static void filterWeights(long[] values, int[] exponents, int alpha, int maxExponent) {
    // A non-positive alpha disables filtering.
    if (alpha <= 0) {
        return;
    }
    // Must run before the values are scaled: at this point each exponent still
    // reflects the approximate magnitude of the original weight.
    for (int i = 0; i < exponents.length; i++) {
        if (maxExponent - exponents[i] > alpha) {
            values[i] = 0;
        }
    }
}
/**
 * Scales the weights represented as integers {@code value * 2^exponent} so the
 * minimum exponent is zero, and removes any power-of-2 factor common to all
 * values. Both reductions shrink the eventual discrete distribution generating
 * (DDG) tree.
 *
 * @param values 53-bit values.
 * @param exponents Power of 2 exponent.
 */
private static void scaleWeights(long[] values, int[] exponents) {
    // Smallest exponent among the non-zero values.
    int minExp = Integer.MAX_VALUE;
    for (int i = 0; i < exponents.length; i++) {
        if (values[i] != 0) {
            minExp = Math.min(minExp, exponents[i]);
        }
    }
    // Power-of-2 factor common to every value. Trailing zeros appear when the
    // original weights use fewer than 52 binary digits, e.g. {1.5, 0.5, 0.25}.
    // Note: numberOfTrailingZeros(0) == 64, so zero entries never lower the minimum.
    int zeros = Long.SIZE;
    for (int i = 0; i < values.length; i++) {
        zeros = Math.min(zeros, Long.numberOfTrailingZeros(values[i]));
        if (zeros == 0) {
            break;
        }
    }
    // Re-base the exponents so the minimum is zero.
    for (int i = 0; i < exponents.length; i++) {
        exponents[i] -= minExp;
    }
    // Strip the common power-of-2 factor from the values.
    if (zeros != 0) {
        for (int i = 0; i < values.length; i++) {
            values[i] >>>= zeros;
        }
    }
}
/**
 * Sums the integers at the specified indices, where each integer is
 * represented as {@code value * 2^exponent}.
 *
 * @param values 53-bit values.
 * @param exponents Power of 2 exponent.
 * @param indices Indices to sum.
 * @return the sum
 */
private static BigInteger sum(long[] values, int[] exponents, int[] indices) {
    // Convert each selected term to a BigInteger and reduce by addition.
    return Arrays.stream(indices)
        .mapToObj(i -> toBigInteger(values[i], exponents[i]))
        .reduce(BigInteger.ZERO, BigInteger::add);
}
/**
 * Converts the value and left-shift offset to a BigInteger.
 * The value is assumed to hold at most 53 bits, which allows the shift to be
 * performed in long arithmetic when the offset is small.
 *
 * @param value 53-bit value.
 * @param offset Left shift offset (must be positive).
 * @return the BigInteger
 */
private static BigInteger toBigInteger(long value, int offset) {
    // Zero inputs need no special case: callers sum over indices of non-zero values.
    // A shift of up to MAX_OFFSET keeps a 53-bit value within the positive long
    // range, so the cheap long shift can be used before boxing.
    return offset <= MAX_OFFSET
        ? BigInteger.valueOf(value << offset)
        : BigInteger.valueOf(value).shiftLeft(offset);
}
/**
* Creates the sampler.
*
* <p>It is assumed the frequencies are all positive and the sum does not
* overflow.
*
* @param rng Generator of uniformly distributed random numbers.
* @param frequencies Observed frequencies of the discrete distribution.
* @param indices Indices of non-zero frequencies.
* @param m Sum of the frequencies.
* @return the sampler
*/
private static FastLoadedDiceRollerDiscreteSampler createSampler(UniformRandomProvider rng,
long[] frequencies,
int[] indices,
long m) {
// ALGORITHM 5: PREPROCESS
// a == frequencies
// m = sum(a)
// h = leaf node count
// H = leaf node label (lH)
final int n = frequencies.length;
// k = ceil(log2(m))
final int k = 64 - Long.numberOfLeadingZeros(m - 1);
// r = a(n+1) = 2^k - m
// Note: for k == 63 the shift wraps in signed arithmetic but the two's-complement
// bit pattern still equals 2^63 - m, so the bit-mask tests below remain correct.
final long r = (1L << k) - m;
// Note:
// A sparse matrix can often be used for H, as most of its entries are empty.
// This implementation uses a 1D array for efficiency at the cost of memory.
// This is limited to approximately ((2^31 - 1) / k), e.g. 34087042 when the sum of
// observations is large enough to create k=63.
// This could be handled using a 2D array. In practice a number of categories this
// large is not expected and is currently not supported.
final int[] h = new int[k];
final int[] lH = new int[checkArraySize((n + 1L) * k)];
Arrays.fill(lH, NO_LABEL);
int d;
// Level j of the tree is defined by bit (k - 1 - j) of each frequency.
for (int j = 0; j < k; j++) {
final int shift = (k - 1) - j;
final long bitMask = 1L << shift;
d = 0;
for (final int i : indices) {
// bool w ← (a[i] >> (k − 1) − j)) & 1
// h[j] = h[j] + w
// if w then:
if ((frequencies[i] & bitMask) != 0) {
h[j]++;
// H[d][j] = i
lH[d * k + j] = i;
d++;
}
}
// process a(n+1) without extending the input frequencies array by 1
// (label n marks the rejection state used by FLDRSampler.sample()).
if ((r & bitMask) != 0) {
h[j]++;
lH[d * k + j] = n;
}
}
return new FLDRSampler(rng, n, k, h, lH);
}
/**
* Creates the sampler. Frequencies are represented as a 53-bit value with a
* left-shift offset.
* <pre>
* BigInteger.valueOf(value).shiftLeft(offset)
* </pre>
*
* <p>It is assumed the frequencies are all positive.
*
* @param rng Generator of uniformly distributed random numbers.
* @param frequencies Observed frequencies of the discrete distribution.
* @param offsets Left shift offsets (must be positive).
* @param indices Indices of non-zero frequencies.
* @param m Sum of the frequencies.
* @return the sampler
*/
private static FastLoadedDiceRollerDiscreteSampler createSampler(UniformRandomProvider rng,
long[] frequencies,
int[] offsets,
int[] indices,
BigInteger m) {
// Repeat the logic from createSampler(...) using extended arithmetic to test the bits
// ALGORITHM 5: PREPROCESS
// a == frequencies
// m = sum(a)
// h = leaf node count
// H = leaf node label (lH)
final int n = frequencies.length;
// k = ceil(log2(m))
final int k = m.subtract(BigInteger.ONE).bitLength();
// r = a(n+1) = 2^k - m
final BigInteger r = BigInteger.ONE.shiftLeft(k).subtract(m);
final int[] h = new int[k];
final int[] lH = new int[checkArraySize((n + 1L) * k)];
Arrays.fill(lH, NO_LABEL);
int d;
// Level j of the tree is defined by bit (k - 1 - j) of each frequency.
for (int j = 0; j < k; j++) {
final int shift = (k - 1) - j;
d = 0;
for (final int i : indices) {
// bool w ← (a[i] >> (k − 1) − j)) & 1
// h[j] = h[j] + w
// if w then:
if (testBit(frequencies[i], offsets[i], shift)) {
h[j]++;
// H[d][j] = i
lH[d * k + j] = i;
d++;
}
}
// process a(n+1) without extending the input frequencies array by 1
// (label n marks the rejection state used by FLDRSampler.sample()).
if (r.testBit(shift)) {
h[j]++;
lH[d * k + j] = n;
}
}
return new FLDRSampler(rng, n, k, h, lH);
}
/**
 * Tests the logical bit of the shifted integer representation.
 * The value is assumed to have at most 53-bits of information and the offset
 * is assumed to be positive. Functionally equivalent to:
 * <pre>
 * BigInteger.valueOf(value).shiftLeft(offset).testBit(n)
 * </pre>
 *
 * @param value 53-bit value.
 * @param offset Left shift offset.
 * @param n Index of bit to test.
 * @return true if the bit is 1
 */
private static boolean testBit(long value, int offset, int n) {
    // Positions below the shift offset are logical zeros.
    if (n < offset) {
        return false;
    }
    // Positions above bit 52 of the stored value are also zero (the implicit
    // leading 1 has already been added to the 52-bit mantissa of normal doubles).
    final int index = n - offset;
    if (index > MANTISSA_SIZE) {
        return false;
    }
    return (value & (1L << index)) != 0;
}
/**
 * Checks the weights array is non-null and non-empty.
 *
 * @param weights Weights.
 * @return the length
 */
private static int checkWeightsNonZeroLength(double[] weights) {
    final boolean missing = weights == null || weights.length == 0;
    if (missing) {
        throw new IllegalArgumentException("weights must contain at least 1 value");
    }
    return weights.length;
}
/**
 * Creates the indices of the non-zero values, in increasing order.
 *
 * @param values Values.
 * @return the indices
 */
private static int[] indicesOfNonZero(long[] values) {
    // Collect into a maximally-sized scratch array, then trim to the count found.
    final int[] scratch = new int[values.length];
    int count = 0;
    for (int i = 0; i < values.length; i++) {
        if (values[i] != 0) {
            scratch[count] = i;
            count++;
        }
    }
    return Arrays.copyOf(scratch, count);
}
/**
 * Finds the index of the first non-zero frequency.
 *
 * @param frequencies Frequencies.
 * @return the index
 * @throws IllegalStateException if all frequencies are zero.
 */
static int indexOfNonZero(long[] frequencies) {
    int i = 0;
    while (i < frequencies.length) {
        if (frequencies[i] != 0) {
            return i;
        }
        i++;
    }
    throw new IllegalStateException("All frequencies are zero");
}
/**
* Check the size is valid for a 1D array.
*
* @param size Size
* @return the size as an {@code int}
* @throws IllegalArgumentException if the size is too large for a 1D array.
*/
static int checkArraySize(long size) {
if (size > MAX_ARRAY_SIZE) {
throw new IllegalArgumentException("Unable to allocate array of size: " + size);
}
return (int) size;
}
}
|
apache/lucene | 34,868 | lucene/core/src/test/org/apache/lucene/index/TestSoftDeletesRetentionMergePolicy.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.index;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import java.io.IOException;
import java.time.Duration;
import java.time.Instant;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Supplier;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.search.FieldExistsQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.PrefixQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.SearcherFactory;
import org.apache.lucene.search.SearcherManager;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;
import org.apache.lucene.tests.util.LuceneTestCase;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.IOUtils;
public class TestSoftDeletesRetentionMergePolicy extends LuceneTestCase {
  /**
   * Verifies that {@link SoftDeletesRetentionMergePolicy} keeps fully soft-deleted segments alive
   * while the retention query matches all documents, and only lets merges reclaim them once the
   * retention query stops matching anything.
   */
  public void testForceMergeFullyDeleted() throws IOException {
    Directory dir = newDirectory();
    AtomicBoolean letItGo = new AtomicBoolean(false);
    // While letItGo is false the retention query matches everything, so all soft-deleted
    // documents must be retained; flipping it to true releases them all.
    MergePolicy policy =
        new SoftDeletesRetentionMergePolicy(
            "soft_delete",
            () -> letItGo.get() ? new MatchNoDocsQuery() : new MatchAllDocsQuery(),
            new LogDocMergePolicy());
    IndexWriterConfig indexWriterConfig =
        newIndexWriterConfig().setMergePolicy(policy).setSoftDeletesField("soft_delete");
    IndexWriter writer = new IndexWriter(dir, indexWriterConfig);
    Document doc = new Document();
    doc.add(new StringField("id", "1", Field.Store.YES));
    doc.add(new NumericDocValuesField("soft_delete", 1));
    writer.addDocument(doc);
    writer.commit();
    doc = new Document();
    doc.add(new StringField("id", "2", Field.Store.YES));
    doc.add(new NumericDocValuesField("soft_delete", 1));
    writer.addDocument(doc);
    DirectoryReader reader = DirectoryReader.open(writer);
    {
      // Both single-doc segments are fully soft-deleted but must be kept by the policy.
      assertEquals(2, reader.leaves().size());
      final SegmentReader segmentReader = (SegmentReader) reader.leaves().get(0).reader();
      assertTrue(policy.keepFullyDeletedSegment(() -> segmentReader));
      assertEquals(
          0, policy.numDeletesToMerge(segmentReader.getSegmentInfo(), 0, () -> segmentReader));
    }
    {
      SegmentReader segmentReader = (SegmentReader) reader.leaves().get(1).reader();
      assertTrue(policy.keepFullyDeletedSegment(() -> segmentReader));
      assertEquals(
          0, policy.numDeletesToMerge(segmentReader.getSegmentInfo(), 0, () -> segmentReader));
      writer.forceMerge(1);
      reader.close();
    }
    reader = DirectoryReader.open(writer);
    {
      // The force merge collapsed the two segments into one, but both retained (soft-deleted)
      // documents survived the merge.
      assertEquals(1, reader.leaves().size());
      SegmentReader segmentReader = (SegmentReader) reader.leaves().get(0).reader();
      assertEquals(2, reader.maxDoc());
      assertTrue(policy.keepFullyDeletedSegment(() -> segmentReader));
      assertEquals(
          0, policy.numDeletesToMerge(segmentReader.getSegmentInfo(), 0, () -> segmentReader));
    }
    writer.forceMerge(1); // make sure we don't merge this
    assertNull(DirectoryReader.openIfChanged(reader));
    writer.forceMergeDeletes(); // make sure we don't merge this
    assertNull(DirectoryReader.openIfChanged(reader));
    letItGo.set(true);
    writer.forceMergeDeletes(); // retention now matches nothing, so this merge may drop everything
    DirectoryReader directoryReader = DirectoryReader.openIfChanged(reader);
    assertNotNull(directoryReader);
    assertEquals(0, directoryReader.numDeletedDocs());
    assertEquals(0, directoryReader.maxDoc());
    IOUtils.close(directoryReader, reader, writer, dir);
  }
  /**
   * Verifies {@code keepFullyDeletedSegment}: a fully soft-deleted segment is kept only when the
   * retention query (here {@code FieldExistsQuery("keep_around")}) matches a document inside it.
   */
  public void testKeepFullyDeletedSegments() throws IOException {
    Directory dir = newDirectory();
    IndexWriterConfig indexWriterConfig =
        newIndexWriterConfig().setMergePolicy(NoMergePolicy.INSTANCE);
    IndexWriter writer = new IndexWriter(dir, indexWriterConfig);
    Document doc = new Document();
    doc.add(new StringField("id", "1", Field.Store.YES));
    doc.add(new NumericDocValuesField("soft_delete", 1));
    writer.addDocument(doc);
    DirectoryReader reader = DirectoryReader.open(writer);
    assertEquals(1, reader.leaves().size());
    MergePolicy policy =
        new SoftDeletesRetentionMergePolicy(
            "soft_delete", () -> new FieldExistsQuery("keep_around"), NoMergePolicy.INSTANCE);
    // The first segment has no "keep_around" field, so it need not be kept.
    assertFalse(
        policy.keepFullyDeletedSegment(() -> (SegmentReader) reader.leaves().get(0).reader()));
    reader.close();
    doc = new Document();
    doc.add(new StringField("id", "1", Field.Store.YES));
    doc.add(new NumericDocValuesField("keep_around", 1));
    doc.add(new NumericDocValuesField("soft_delete", 1));
    writer.addDocument(doc);
    DirectoryReader reader1 = DirectoryReader.open(writer);
    assertEquals(2, reader1.leaves().size());
    assertFalse(
        policy.keepFullyDeletedSegment(() -> (SegmentReader) reader1.leaves().get(0).reader()));
    // The second segment carries "keep_around" doc values, so the policy must retain it.
    assertTrue(
        policy.keepFullyDeletedSegment(() -> (SegmentReader) reader1.leaves().get(1).reader()));
    IOUtils.close(reader1, writer, dir);
  }
  /**
   * Uses a {@code LongPoint} range query over a {@code creation_date} field as the retention
   * query: soft-deleted document versions created within the last 24 hours must survive a force
   * merge, while older versions may be reclaimed.
   */
  public void testFieldBasedRetention() throws IOException {
    Directory dir = newDirectory();
    IndexWriterConfig indexWriterConfig = newIndexWriterConfig();
    Instant now = Instant.now();
    Instant time24HoursAgo = now.minus(Duration.ofDays(1));
    String softDeletesField = "soft_delete";
    // Retention window: anything with creation_date in [now - 24h, now] is kept.
    Supplier<Query> docsOfLast24Hours =
        () ->
            LongPoint.newRangeQuery(
                "creation_date", time24HoursAgo.toEpochMilli(), now.toEpochMilli());
    indexWriterConfig.setMergePolicy(
        new SoftDeletesRetentionMergePolicy(
            softDeletesField, docsOfLast24Hours, new LogDocMergePolicy()));
    indexWriterConfig.setSoftDeletesField(softDeletesField);
    IndexWriter writer = new IndexWriter(dir, indexWriterConfig);
    // Version 1 (28h old) and version 2 (26h old) fall outside the retention window.
    long time28HoursAgo = now.minus(Duration.ofHours(28)).toEpochMilli();
    Document doc = new Document();
    doc.add(new StringField("id", "1", Field.Store.YES));
    doc.add(new StringField("version", "1", Field.Store.YES));
    doc.add(new LongPoint("creation_date", time28HoursAgo));
    writer.addDocument(doc);
    writer.flush();
    long time26HoursAgo = now.minus(Duration.ofHours(26)).toEpochMilli();
    doc = new Document();
    doc.add(new StringField("id", "1", Field.Store.YES));
    doc.add(new StringField("version", "2", Field.Store.YES));
    doc.add(new LongPoint("creation_date", time26HoursAgo));
    writer.softUpdateDocument(
        new Term("id", "1"), doc, new NumericDocValuesField("soft_delete", 1));
    if (random().nextBoolean()) {
      writer.flush();
    }
    // Versions 3, 4 and 5 (23h, 12h and "now") all fall inside the 24h retention window.
    long time23HoursAgo = now.minus(Duration.ofHours(23)).toEpochMilli();
    doc = new Document();
    doc.add(new StringField("id", "1", Field.Store.YES));
    doc.add(new StringField("version", "3", Field.Store.YES));
    doc.add(new LongPoint("creation_date", time23HoursAgo));
    writer.softUpdateDocument(
        new Term("id", "1"), doc, new NumericDocValuesField("soft_delete", 1));
    if (random().nextBoolean()) {
      writer.flush();
    }
    long time12HoursAgo = now.minus(Duration.ofHours(12)).toEpochMilli();
    doc = new Document();
    doc.add(new StringField("id", "1", Field.Store.YES));
    doc.add(new StringField("version", "4", Field.Store.YES));
    doc.add(new LongPoint("creation_date", time12HoursAgo));
    writer.softUpdateDocument(
        new Term("id", "1"), doc, new NumericDocValuesField("soft_delete", 1));
    if (random().nextBoolean()) {
      writer.flush();
    }
    doc = new Document();
    doc.add(new StringField("id", "1", Field.Store.YES));
    doc.add(new StringField("version", "5", Field.Store.YES));
    doc.add(new LongPoint("creation_date", now.toEpochMilli()));
    writer.softUpdateDocument(
        new Term("id", "1"), doc, new NumericDocValuesField("soft_delete", 1));
    if (random().nextBoolean()) {
      writer.flush();
    }
    writer.forceMerge(1);
    DirectoryReader reader = DirectoryReader.open(writer);
    // Only version 5 is live; versions 3 and 4 are soft-deleted but retained.
    assertEquals(1, reader.numDocs());
    assertEquals(3, reader.maxDoc());
    Set<String> versions = new HashSet<>();
    versions.add(
        reader.storedFields().document(0, Collections.singleton("version")).get("version"));
    versions.add(
        reader.storedFields().document(1, Collections.singleton("version")).get("version"));
    versions.add(
        reader.storedFields().document(2, Collections.singleton("version")).get("version"));
    assertTrue(versions.contains("5"));
    assertTrue(versions.contains("4"));
    assertTrue(versions.contains("3"));
    IOUtils.close(reader, writer, dir);
  }
  /**
   * With a {@code MatchAllDocsQuery} retention query every soft-deleted document must be kept:
   * three soft-updates of the same id yield 0 live docs but maxDoc of 3, both before and after a
   * force merge into a single segment.
   */
  public void testKeepAllDocsAcrossMerges() throws IOException {
    Directory dir = newDirectory();
    IndexWriterConfig indexWriterConfig = newIndexWriterConfig();
    indexWriterConfig.setMergePolicy(
        new SoftDeletesRetentionMergePolicy(
            "soft_delete", () -> new MatchAllDocsQuery(), new LogDocMergePolicy()));
    indexWriterConfig.setSoftDeletesField("soft_delete");
    IndexWriter writer = new IndexWriter(dir, indexWriterConfig);
    Document doc = new Document();
    doc.add(new StringField("id", "1", Field.Store.YES));
    writer.softUpdateDocument(
        new Term("id", "1"), doc, new NumericDocValuesField("soft_delete", 1));
    writer.commit();
    doc = new Document();
    doc.add(new StringField("id", "1", Field.Store.YES));
    writer.softUpdateDocument(
        new Term("id", "1"), doc, new NumericDocValuesField("soft_delete", 1));
    writer.commit();
    doc = new Document();
    doc.add(new StringField("id", "1", Field.Store.YES));
    doc.add(new NumericDocValuesField("soft_delete", 1)); // already deleted
    writer.softUpdateDocument(
        new Term("id", "1"), doc, new NumericDocValuesField("soft_delete", 1));
    writer.commit();
    DirectoryReader reader = DirectoryReader.open(writer);
    assertEquals(0, reader.numDocs());
    assertEquals(3, reader.maxDoc());
    assertEquals(0, writer.getDocStats().numDocs);
    assertEquals(3, writer.getDocStats().maxDoc);
    assertEquals(3, reader.leaves().size());
    reader.close();
    writer.forceMerge(1);
    // The merge must not drop any of the retained soft-deleted documents.
    reader = DirectoryReader.open(writer);
    assertEquals(0, reader.numDocs());
    assertEquals(3, reader.maxDoc());
    assertEquals(0, writer.getDocStats().numDocs);
    assertEquals(3, writer.getDocStats().maxDoc);
    assertEquals(1, reader.leaves().size());
    IOUtils.close(reader, writer, dir);
  }
  /**
   * Tests soft deletes that carry over deleted documents on merge for history retention.
   *
   * <p>Multiple threads concurrently soft-update a small set of ids, each update tagged with a
   * global sequence number. The retention query keeps all documents within the latest 50 sequence
   * ids alive across merges; after a force merge the retained history must still be searchable
   * when live docs are ignored.
   */
  public void testSoftDeleteWithRetention() throws IOException, InterruptedException {
    AtomicInteger seqIds = new AtomicInteger(0);
    Directory dir = newDirectory();
    IndexWriterConfig indexWriterConfig = newIndexWriterConfig();
    // Retain every document whose seq_id lies within the last 50 issued sequence numbers.
    indexWriterConfig.setMergePolicy(
        new SoftDeletesRetentionMergePolicy(
            "soft_delete",
            () -> IntPoint.newRangeQuery("seq_id", seqIds.intValue() - 50, Integer.MAX_VALUE),
            indexWriterConfig.getMergePolicy()));
    indexWriterConfig.setSoftDeletesField("soft_delete");
    IndexWriter writer = new IndexWriter(dir, indexWriterConfig);
    Thread[] threads = new Thread[2 + random().nextInt(3)];
    CountDownLatch startLatch = new CountDownLatch(1);
    CountDownLatch started = new CountDownLatch(threads.length);
    boolean updateSeveralDocs = random().nextBoolean();
    Set<String> ids = Collections.synchronizedSet(new HashSet<>());
    for (int i = 0; i < threads.length; i++) {
      threads[i] =
          new Thread(
              () -> {
                try {
                  started.countDown();
                  startLatch.await();
                  for (int d = 0; d < 100; d++) {
                    String id = String.valueOf(random().nextInt(10));
                    int seqId = seqIds.incrementAndGet();
                    if (updateSeveralDocs) {
                      // Update a two-document block per id to also exercise softUpdateDocuments.
                      Document doc = new Document();
                      doc.add(new StringField("id", id, Field.Store.YES));
                      doc.add(new IntPoint("seq_id", seqId));
                      writer.softUpdateDocuments(
                          new Term("id", id),
                          Arrays.asList(doc, doc),
                          new NumericDocValuesField("soft_delete", 1));
                    } else {
                      Document doc = new Document();
                      doc.add(new StringField("id", id, Field.Store.YES));
                      doc.add(new IntPoint("seq_id", seqId));
                      writer.softUpdateDocument(
                          new Term("id", id), doc, new NumericDocValuesField("soft_delete", 1));
                    }
                    if (rarely()) {
                      writer.flush();
                    }
                    ids.add(id);
                  }
                } catch (IOException | InterruptedException e) {
                  throw new AssertionError(e);
                }
              });
      threads[i].start();
    }
    started.await();
    startLatch.countDown();
    for (int i = 0; i < threads.length; i++) {
      threads[i].join();
    }
    DirectoryReader reader = DirectoryReader.open(writer);
    IndexSearcher searcher = new IndexSearcher(reader);
    for (String id : ids) {
      TopDocs topDocs = searcher.search(new TermQuery(new Term("id", id)), 10);
      if (updateSeveralDocs) {
        assertEquals(2, topDocs.totalHits.value());
        // The two copies of a block update must land on adjacent doc ids.
        assertEquals(Math.abs(topDocs.scoreDocs[0].doc - topDocs.scoreDocs[1].doc), 1);
      } else {
        assertEquals(1, topDocs.totalHits.value());
      }
    }
    writer.addDocument(new Document()); // add a dummy doc to trigger a segment here
    writer.flush();
    writer.forceMerge(1);
    DirectoryReader oldReader = reader;
    reader = DirectoryReader.openIfChanged(reader, writer);
    if (reader != null) {
      oldReader.close();
      assertNotSame(oldReader, reader);
    } else {
      reader = oldReader;
    }
    assertEquals(1, reader.leaves().size());
    LeafReaderContext leafReaderContext = reader.leaves().get(0);
    LeafReader leafReader = leafReaderContext.reader();
    // Search with live docs disabled so the retained (soft-deleted) history is visible.
    searcher =
        new IndexSearcher(
            new FilterLeafReader(leafReader) {
              @Override
              public CacheHelper getCoreCacheHelper() {
                return leafReader.getCoreCacheHelper();
              }
              @Override
              public CacheHelper getReaderCacheHelper() {
                return leafReader.getReaderCacheHelper();
              }
              @Override
              public Bits getLiveDocs() {
                return null;
              }
              @Override
              public int numDocs() {
                return maxDoc();
              }
            });
    TopDocs seq_id =
        searcher.search(
            IntPoint.newRangeQuery("seq_id", seqIds.intValue() - 50, Integer.MAX_VALUE), 10);
    // At least the latest 50 sequence ids must have survived the merge.
    assertTrue(seq_id.totalHits.value() + " hits", seq_id.totalHits.value() >= 50);
    searcher = new IndexSearcher(reader);
    for (String id : ids) {
      if (updateSeveralDocs) {
        assertEquals(2, searcher.search(new TermQuery(new Term("id", id)), 10).totalHits.value());
      } else {
        assertEquals(1, searcher.search(new TermQuery(new Term("id", id)), 10).totalHits.value());
      }
    }
    IOUtils.close(reader, writer, dir);
  }
  /**
   * Verifies that {@code forceMergeDeletes} merges away segments containing soft-deleted
   * documents when the retention query keeps nothing ({@code MatchNoDocsQuery}).
   */
  public void testForceMergeDeletes() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig config = newIndexWriterConfig().setSoftDeletesField("soft_delete");
    config.setMergePolicy(
        newMergePolicy(random(), false)); // no mock MP it might not select segments for force merge
    if (random().nextBoolean()) {
      config.setMergePolicy(
          new SoftDeletesRetentionMergePolicy(
              "soft_delete", () -> new MatchNoDocsQuery(), config.getMergePolicy()));
    }
    IndexWriter writer = new IndexWriter(dir, config);
    // The first segment includes d1 and d2
    for (int i = 0; i < 2; i++) {
      Document d = new Document();
      d.add(new StringField("id", Integer.toString(i), Field.Store.YES));
      writer.addDocument(d);
    }
    writer.flush();
    // The second segment includes only the tombstone
    Document tombstone = new Document();
    tombstone.add(new NumericDocValuesField("soft_delete", 1));
    writer.softUpdateDocument(
        new Term("id", "1"), tombstone, new NumericDocValuesField("soft_delete", 1));
    writer.flush(
        false,
        true); // flush pending updates but don't trigger a merge, we run forceMergeDeletes below
    // Now we have two segments - both having soft-deleted documents.
    // We expect any MP to merge these segments into one segment
    // when calling forceMergeDeletes.
    writer.forceMergeDeletes(true);
    assertEquals(1, writer.cloneSegmentInfos().size());
    assertEquals(1, writer.getDocStats().numDocs);
    assertEquals(1, writer.getDocStats().maxDoc);
    writer.close();
    dir.close();
  }
  /**
   * A newly flushed segment whose documents are all deleted (either soft- or hard-deleted,
   * chosen randomly) must be dropped instead of being committed, leaving the segment count
   * unchanged.
   */
  public void testDropFullySoftDeletedSegment() throws Exception {
    Directory dir = newDirectory();
    // Randomly exercise both the soft-delete and the classic hard-delete code path.
    String softDelete = random().nextBoolean() ? null : "soft_delete";
    IndexWriterConfig config = newIndexWriterConfig().setSoftDeletesField(softDelete);
    config.setMergePolicy(newMergePolicy(random(), true));
    if (softDelete != null && random().nextBoolean()) {
      config.setMergePolicy(
          new SoftDeletesRetentionMergePolicy(
              softDelete, () -> new MatchNoDocsQuery(), config.getMergePolicy()));
    }
    IndexWriter writer = new IndexWriter(dir, config);
    for (int i = 0; i < 2; i++) {
      Document d = new Document();
      d.add(new StringField("id", Integer.toString(i), Field.Store.YES));
      writer.addDocument(d);
    }
    writer.flush();
    assertEquals(1, writer.cloneSegmentInfos().size());
    if (softDelete != null) {
      // the newly created segment should be dropped as it is fully deleted (i.e. only contains
      // deleted docs).
      if (random().nextBoolean()) {
        Document tombstone = new Document();
        tombstone.add(new NumericDocValuesField(softDelete, 1));
        writer.softUpdateDocument(
            new Term("id", "1"), tombstone, new NumericDocValuesField(softDelete, 1));
      } else {
        Document doc = new Document();
        doc.add(new StringField("id", Integer.toString(1), Field.Store.YES));
        if (random().nextBoolean()) {
          writer.softUpdateDocument(
              new Term("id", "1"), doc, new NumericDocValuesField(softDelete, 1));
        } else {
          writer.addDocument(doc);
        }
        writer.updateDocValues(new Term("id", "1"), new NumericDocValuesField(softDelete, 1));
      }
    } else {
      Document d = new Document();
      d.add(new StringField("id", "1", Field.Store.YES));
      writer.addDocument(d);
      writer.deleteDocuments(new Term("id", "1"));
    }
    writer.commit();
    IndexReader reader = DirectoryReader.open(writer);
    assertEquals(reader.numDocs(), 1);
    reader.close();
    // The fully deleted segment was dropped: still exactly one segment after the commit.
    assertEquals(1, writer.cloneSegmentInfos().size());
    writer.close();
    dir.close();
  }
  /**
   * A soft delete applied concurrently while a merge is running (triggered from the merged
   * segment warmer) must be carried over, and the retention query ({@code
   * FieldExistsQuery("keep")}) keeps the soft-deleted document alive.
   */
  public void testSoftDeleteWhileMergeSurvives() throws IOException {
    Directory dir = newDirectory();
    String softDelete = "soft_delete";
    IndexWriterConfig config = newIndexWriterConfig().setSoftDeletesField(softDelete);
    AtomicBoolean update = new AtomicBoolean(true);
    config.setReaderPooling(true);
    config.setMergePolicy(
        new SoftDeletesRetentionMergePolicy(
            "soft_delete", () -> new FieldExistsQuery("keep"), new LogDocMergePolicy()));
    IndexWriter writer = new IndexWriter(dir, config);
    // The warmer runs while the merge is in flight; the first warm-up performs a concurrent
    // soft update + commit that the merge must not lose.
    writer
        .getConfig()
        .setMergedSegmentWarmer(
            _ -> {
              if (update.compareAndSet(true, false)) {
                try {
                  writer.softUpdateDocument(
                      new Term("id", "0"),
                      new Document(),
                      new NumericDocValuesField(softDelete, 1),
                      new NumericDocValuesField("keep", 1));
                  writer.commit();
                } catch (IOException e) {
                  throw new AssertionError(e);
                }
              }
            });
    boolean preExistingDeletes = random().nextBoolean();
    for (int i = 0; i < 2; i++) {
      Document d = new Document();
      d.add(new StringField("id", Integer.toString(i), Field.Store.YES));
      if (preExistingDeletes && random().nextBoolean()) {
        writer.addDocument(d); // randomly add a preexisting hard-delete that we don't carry over
        writer.deleteDocuments(new Term("id", Integer.toString(i)));
        d.add(new NumericDocValuesField("keep", 1));
        writer.addDocument(d);
      } else {
        d.add(new NumericDocValuesField("keep", 1));
        writer.addDocument(d);
      }
      writer.flush();
    }
    writer.forceMerge(1);
    writer.commit();
    // The warmer must have fired exactly once during the merge.
    assertFalse(update.get());
    DirectoryReader open = DirectoryReader.open(dir);
    assertEquals(0, open.numDeletedDocs());
    assertEquals(3, open.maxDoc());
    IOUtils.close(open, writer, dir);
  }
  /*
   * This test is trying to hard-delete a particular document while the segment is being merged
   * and that document is already soft-deleted. This requires special logic inside
   * IndexWriter#carryOverHardDeletes since docMaps are not created for this document.
   */
  public void testDeleteDocWhileMergeThatIsSoftDeleted() throws IOException {
    Directory dir = newDirectory();
    String softDelete = "soft_delete";
    IndexWriterConfig config = newIndexWriterConfig().setSoftDeletesField(softDelete);
    AtomicBoolean delete = new AtomicBoolean(true);
    config.setReaderPooling(true);
    config.setMergePolicy(new LogDocMergePolicy());
    IndexWriter writer = new IndexWriter(dir, config);
    Document d = new Document();
    d.add(new StringField("id", "0", Field.Store.YES));
    writer.addDocument(d);
    d = new Document();
    d.add(new StringField("id", "1", Field.Store.YES));
    writer.addDocument(d);
    if (random().nextBoolean()) {
      // randomly run with a preexisting hard delete
      d = new Document();
      d.add(new StringField("id", "2", Field.Store.YES));
      writer.addDocument(d);
      writer.deleteDocuments(new Term("id", "2"));
    }
    writer.flush();
    DirectoryReader reader = DirectoryReader.open(writer);
    // Soft-delete doc 0 before the merge starts ...
    writer.softUpdateDocument(
        new Term("id", "0"), new Document(), new NumericDocValuesField(softDelete, 1));
    writer.flush();
    // ... then hard-delete the same doc from inside the warmer, i.e. while the merge runs.
    writer
        .getConfig()
        .setMergedSegmentWarmer(
            _ -> {
              if (delete.compareAndSet(true, false)) {
                try {
                  long seqNo = writer.tryDeleteDocument(reader, 0);
                  assertTrue("seqId was -1", seqNo != -1);
                } catch (IOException e) {
                  throw new AssertionError(e);
                }
              }
            });
    writer.forceMerge(1);
    assertEquals(2, writer.getDocStats().numDocs);
    assertEquals(2, writer.getDocStats().maxDoc);
    assertFalse(delete.get());
    IOUtils.close(reader, writer, dir);
  }
  /**
   * Soft-deletes a document via {@code updateDocValues} and then "undeletes" it by resetting the
   * soft-deletes doc value to {@code null}, which must make the document live again.
   */
  public void testUndeleteDocument() throws IOException {
    Directory dir = newDirectory();
    String softDelete = "soft_delete";
    IndexWriterConfig config =
        newIndexWriterConfig()
            .setSoftDeletesField(softDelete)
            .setMergePolicy(
                new SoftDeletesRetentionMergePolicy(
                    "soft_delete", MatchAllDocsQuery::new, new LogDocMergePolicy()));
    config.setReaderPooling(true);
    // NOTE(review): this second setMergePolicy replaces the retention policy configured just
    // above with a plain LogDocMergePolicy — confirm that is intended.
    config.setMergePolicy(new LogDocMergePolicy());
    IndexWriter writer = new IndexWriter(dir, config);
    Document d = new Document();
    d.add(new StringField("id", "0", Field.Store.YES));
    d.add(new StringField("seq_id", "0", Field.Store.YES));
    writer.addDocument(d);
    d = new Document();
    d.add(new StringField("id", "1", Field.Store.YES));
    writer.addDocument(d);
    writer.updateDocValues(new Term("id", "0"), new NumericDocValuesField("soft_delete", 1));
    try (IndexReader reader = DirectoryReader.open(writer)) {
      assertEquals(2, reader.maxDoc());
      assertEquals(1, reader.numDocs());
    }
    // A null doc value clears the soft delete, bringing the document back to life.
    doUpdate(new Term("id", "0"), writer, new NumericDocValuesField("soft_delete", null));
    try (IndexReader reader = DirectoryReader.open(writer)) {
      assertEquals(2, reader.maxDoc());
      assertEquals(2, reader.numDocs());
    }
    IOUtils.close(writer, dir);
  }
  /**
   * When a document is both soft-deleted and subsequently hard-deleted, a merge must count it as
   * a hard delete (the hard delete supersedes the soft delete) and may reclaim it even though the
   * retention query matches all documents.
   */
  public void testMergeSoftDeleteAndHardDelete() throws Exception {
    Directory dir = newDirectory();
    String softDelete = "soft_delete";
    IndexWriterConfig config =
        newIndexWriterConfig()
            .setSoftDeletesField(softDelete)
            .setMergePolicy(
                new SoftDeletesRetentionMergePolicy(
                    "soft_delete", MatchAllDocsQuery::new, new LogDocMergePolicy()));
    config.setReaderPooling(true);
    IndexWriter writer = new IndexWriter(dir, config);
    Document d = new Document();
    d.add(new StringField("id", "0", Field.Store.YES));
    writer.addDocument(d);
    d = new Document();
    d.add(new StringField("id", "1", Field.Store.YES));
    d.add(new NumericDocValuesField("soft_delete", 1));
    writer.addDocument(d);
    try (DirectoryReader reader = DirectoryReader.open(writer)) {
      assertEquals(2, reader.maxDoc());
      assertEquals(1, reader.numDocs());
    }
    // Retry hard-deleting the soft-deleted doc until tryDeleteDocument succeeds (it can fail
    // transiently when a concurrent merge invalidates the reader).
    while (true) {
      try (DirectoryReader reader = DirectoryReader.open(writer)) {
        TopDocs topDocs =
            new IndexSearcher(new IncludeSoftDeletesWrapper(reader))
                .search(new TermQuery(new Term("id", "1")), 1);
        assertEquals(1, topDocs.totalHits.value());
        if (writer.tryDeleteDocument(reader, topDocs.scoreDocs[0].doc) > 0) {
          break;
        }
      }
    }
    writer.forceMergeDeletes(true);
    assertEquals(1, writer.cloneSegmentInfos().size());
    SegmentCommitInfo si = writer.cloneSegmentInfos().info(0);
    assertEquals(0, si.getSoftDelCount()); // hard-delete should supersede the soft-delete
    assertEquals(0, si.getDelCount());
    assertEquals(1, si.info.maxDoc());
    IOUtils.close(writer, dir);
  }
  /**
   * Soft-deleting a document via {@code tryUpdateDocValue} (alongside an unrelated doc-values
   * update) must be reflected in the segment's soft-delete count.
   */
  public void testSoftDeleteWithTryUpdateDocValue() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig config =
        newIndexWriterConfig()
            .setSoftDeletesField("soft_delete")
            .setMergePolicy(
                new SoftDeletesRetentionMergePolicy(
                    "soft_delete", MatchAllDocsQuery::new, newLogMergePolicy()));
    IndexWriter writer = new IndexWriter(dir, config);
    SearcherManager sm = new SearcherManager(writer, new SearcherFactory());
    Document d = new Document();
    d.add(new StringField("id", "0", Field.Store.YES));
    writer.addDocument(d);
    sm.maybeRefreshBlocking();
    doUpdate(
        new Term("id", "0"),
        writer,
        new NumericDocValuesField("soft_delete", 1),
        new NumericDocValuesField("other-field", 1));
    sm.maybeRefreshBlocking();
    // The single doc is now soft-deleted but retained by the MatchAllDocsQuery retention query.
    assertEquals(1, writer.cloneSegmentInfos().size());
    SegmentCommitInfo si = writer.cloneSegmentInfos().info(0);
    assertEquals(1, si.getSoftDelCount());
    assertEquals(1, si.info.maxDoc());
    IOUtils.close(sm, writer, dir);
  }
  /**
   * Randomly interleaves soft updates, doc-values soft deletes, {@code tryDeleteDocument} and
   * regular hard deletes, then checks that a reader which ignores soft deletes (but honors hard
   * deletes) sees exactly the tracked set of live documents.
   */
  public void testMixedSoftDeletesAndHardDeletes() throws Exception {
    Directory dir = newDirectory();
    String softDeletesField = "soft-deletes";
    IndexWriterConfig config =
        newIndexWriterConfig()
            .setMaxBufferedDocs(2 + random().nextInt(50))
            .setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH)
            .setSoftDeletesField(softDeletesField)
            .setMergePolicy(
                new SoftDeletesRetentionMergePolicy(
                    softDeletesField, MatchAllDocsQuery::new, newMergePolicy()));
    IndexWriter writer = new IndexWriter(dir, config);
    int numDocs = 10 + random().nextInt(100);
    // liveDocs mirrors the ids that should be visible to a hard-deletes-only reader.
    Set<String> liveDocs = new HashSet<>();
    for (int i = 0; i < numDocs; i++) {
      String id = Integer.toString(i);
      Document doc = new Document();
      doc.add(new StringField("id", id, Field.Store.YES));
      writer.addDocument(doc);
      liveDocs.add(id);
    }
    for (int i = 0; i < numDocs; i++) {
      if (random().nextBoolean()) {
        String id = Integer.toString(i);
        if (random().nextBoolean() && liveDocs.contains(id)) {
          // Soft-delete in place via a doc-values update; the doc stays "live" for this reader.
          doUpdate(new Term("id", id), writer, new NumericDocValuesField(softDeletesField, 1));
        } else {
          // Soft-update with a replacement document carrying a "v"-prefixed id.
          Document doc = new Document();
          doc.add(new StringField("id", "v" + id, Field.Store.YES));
          writer.softUpdateDocument(
              new Term("id", id), doc, new NumericDocValuesField(softDeletesField, 1));
          liveDocs.add("v" + id);
        }
      }
      if (random().nextBoolean() && liveDocs.isEmpty() == false) {
        // Hard-delete a random live id through either code path.
        String delId = RandomPicks.randomFrom(random(), liveDocs);
        if (random().nextBoolean()) {
          doDelete(new Term("id", delId), writer);
        } else {
          writer.deleteDocuments(new Term("id", delId));
        }
        liveDocs.remove(delId);
      }
    }
    try (DirectoryReader unwrapped = DirectoryReader.open(writer)) {
      DirectoryReader reader = new IncludeSoftDeletesWrapper(unwrapped);
      assertEquals(liveDocs.size(), reader.numDocs());
    }
    writer.commit();
    IOUtils.close(writer, dir);
  }
  /**
   * The retention query (here a {@code PrefixQuery} on "foo") must be rewritten before being
   * executed by the merge policy: only soft-deleted docs matching the prefix are retained.
   */
  public void testRewriteRetentionQuery() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig config =
        newIndexWriterConfig()
            .setSoftDeletesField("soft_deletes")
            .setMergePolicy(
                new SoftDeletesRetentionMergePolicy(
                    "soft_deletes",
                    () -> new PrefixQuery(new Term("id", "foo")),
                    newMergePolicy()));
    IndexWriter writer = new IndexWriter(dir, config);
    Document d = new Document();
    d.add(new StringField("id", "foo-1", Field.Store.YES));
    writer.addDocument(d);
    d = new Document();
    d.add(new StringField("id", "foo-2", Field.Store.YES));
    writer.softUpdateDocument(
        new Term("id", "foo-1"), d, new NumericDocValuesField("soft_deletes", 1));
    d = new Document();
    d.add(new StringField("id", "bar-1", Field.Store.YES));
    writer.addDocument(d);
    // Note: the same Document instance is reused, so the update below indexes a doc carrying
    // both the "bar-1" and "bar-2" id fields.
    d.add(new StringField("id", "bar-2", Field.Store.YES));
    writer.softUpdateDocument(
        new Term("id", "bar-1"), d, new NumericDocValuesField("soft_deletes", 1));
    writer.forceMerge(1);
    // Soft-deleted foo-1 is retained by the prefix query; soft-deleted bar-1 is reclaimed.
    assertEquals(2, writer.getDocStats().numDocs); // foo-2, bar-2
    assertEquals(3, writer.getDocStats().maxDoc); // foo-1, foo-2, bar-2
    IOUtils.close(writer, dir);
  }
static void doUpdate(Term doc, IndexWriter writer, Field... fields) throws IOException {
long seqId = -1;
do { // retry if we just committing a merge
try (DirectoryReader reader = DirectoryReader.open(writer)) {
TopDocs topDocs =
new IndexSearcher(new IncludeSoftDeletesWrapper(reader)).search(new TermQuery(doc), 10);
assertEquals(1, topDocs.totalHits.value());
int theDoc = topDocs.scoreDocs[0].doc;
seqId = writer.tryUpdateDocValue(reader, theDoc, fields);
}
} while (seqId == -1);
}
static void doDelete(Term doc, IndexWriter writer) throws IOException {
long seqId;
do { // retry if we just committing a merge
try (DirectoryReader reader = DirectoryReader.open(writer)) {
TopDocs topDocs =
new IndexSearcher(new IncludeSoftDeletesWrapper(reader)).search(new TermQuery(doc), 10);
assertEquals(1, topDocs.totalHits.value());
int theDoc = topDocs.scoreDocs[0].doc;
seqId = writer.tryDeleteDocument(reader, theDoc);
}
} while (seqId == -1);
}
  /**
   * Wraps each leaf so that its live docs reflect only hard deletes, making soft-deleted
   * documents visible to searches. numDocs is recomputed from the hard live docs so reader
   * statistics stay consistent with the exposed live docs.
   */
  private static final class IncludeSoftDeletesSubReaderWrapper
      extends FilterDirectoryReader.SubReaderWrapper {
    @Override
    public LeafReader wrap(LeafReader reader) {
      // Unwrap down to the SegmentReader to access its hard live docs.
      while (reader instanceof FilterLeafReader) {
        reader = ((FilterLeafReader) reader).getDelegate();
      }
      Bits hardLiveDocs = ((SegmentReader) reader).getHardLiveDocs();
      final int numDocs;
      if (hardLiveDocs == null) {
        // No hard deletes: every document is live.
        numDocs = reader.maxDoc();
      } else {
        // Count the set bits to get the number of hard-live documents.
        int bits = 0;
        for (int i = 0; i < hardLiveDocs.length(); i++) {
          if (hardLiveDocs.get(i)) {
            bits++;
          }
        }
        numDocs = bits;
      }
      return new FilterLeafReader(reader) {
        @Override
        public int numDocs() {
          return numDocs;
        }
        @Override
        public Bits getLiveDocs() {
          return hardLiveDocs;
        }
        @Override
        public CacheHelper getCoreCacheHelper() {
          return null;
        }
        @Override
        public CacheHelper getReaderCacheHelper() {
          return null;
        }
      };
    }
  }
  /**
   * DirectoryReader wrapper that exposes soft-deleted documents as live by delegating live-docs
   * handling to {@link IncludeSoftDeletesSubReaderWrapper}. Caching is disabled since the
   * wrapped view does not match the underlying reader's deletes.
   */
  private static final class IncludeSoftDeletesWrapper extends FilterDirectoryReader {
    IncludeSoftDeletesWrapper(DirectoryReader in) throws IOException {
      super(in, new IncludeSoftDeletesSubReaderWrapper());
    }
    @Override
    protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException {
      return new IncludeSoftDeletesWrapper(in);
    }
    @Override
    public CacheHelper getReaderCacheHelper() {
      return null;
    }
  }
}
|
apache/parquet-java | 35,062 | parquet-hadoop/src/test/java/org/apache/parquet/crypto/TestPropertiesDrivenEncryption.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.parquet.crypto;
import static org.apache.parquet.hadoop.ParquetFileWriter.EFMAGIC;
import static org.apache.parquet.hadoop.ParquetFileWriter.EF_MAGIC_STR;
import static org.apache.parquet.hadoop.ParquetFileWriter.MAGIC;
import static org.apache.parquet.hadoop.ParquetFileWriter.Mode.OVERWRITE;
import static org.apache.parquet.hadoop.ParquetInputFormat.OFF_HEAP_DECRYPT_BUFFER_ENABLED;
import static org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.INT32;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.bytes.DirectByteBufferAllocator;
import org.apache.parquet.bytes.HeapByteBufferAllocator;
import org.apache.parquet.bytes.TrackingByteBufferAllocator;
import org.apache.parquet.column.ParquetProperties.WriterVersion;
import org.apache.parquet.crypto.keytools.KeyToolkit;
import org.apache.parquet.crypto.keytools.PropertiesDrivenCryptoFactory;
import org.apache.parquet.crypto.keytools.mocks.InMemoryKMS;
import org.apache.parquet.crypto.keytools.mocks.LocalWrapInMemoryKMS;
import org.apache.parquet.example.data.Group;
import org.apache.parquet.example.data.simple.SimpleGroupFactory;
import org.apache.parquet.hadoop.ParquetReader;
import org.apache.parquet.hadoop.ParquetWriter;
import org.apache.parquet.hadoop.example.ExampleParquetWriter;
import org.apache.parquet.hadoop.example.GroupReadSupport;
import org.apache.parquet.io.api.Binary;
import org.apache.parquet.schema.MessageType;
import org.apache.parquet.schema.Types;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ErrorCollector;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/*
* This file contains samples for writing and reading encrypted Parquet files in different
* encryption and decryption configurations, set using a properties-driven interface.
*
 * The write sample produces a number of parquet files, each encrypted with a different
* encryption configuration as described below.
* The name of each file is in the form of:
* <encryption-configuration-name>.parquet.encrypted or
* NO_ENCRYPTION.parquet for plaintext file.
*
* The read sample creates a set of decryption configurations and then uses each of them
* to read all encrypted files in the input directory.
*
* The different encryption and decryption configurations are listed below.
*
*
* A detailed description of the Parquet Modular Encryption specification can be found
* here:
* https://github.com/apache/parquet-format/blob/encryption/Encryption.md
*
* The write sample creates files with eight columns in the following
* encryption configurations:
*
* - ENCRYPT_COLUMNS_AND_FOOTER: Encrypt two columns and the footer, with different
* keys.
* - ENCRYPT_COLUMNS_PLAINTEXT_FOOTER: Encrypt two columns, with different keys.
* Do not encrypt footer (to enable legacy readers)
* - plaintext footer mode.
* - ENCRYPT_COLUMNS_AND_FOOTER_CTR: Encrypt two columns and the footer, with different
* keys. Use the alternative (AES_GCM_CTR_V1) algorithm.
* - COMPLETE_COLUMN_ENCRYPTION: Encrypt two columns and the footer, with different
* keys. Encrypt other columns with the footer key.
* - UNIFORM_ENCRYPTION: Encrypt all columns and footer with the same master key.
* - NO_ENCRYPTION: Do not encrypt anything
*
*
*
 * The read sample uses each of the following decryption configurations to read every
 * encrypted file in the input directory:
*
* - DECRYPT_WITH_KEY_RETRIEVER: Decrypt using key retriever that holds the keys of
* two encrypted columns and the footer key.
* - NO_DECRYPTION: Do not decrypt anything.
*/
@RunWith(Parameterized.class)
public class TestPropertiesDrivenEncryption {
@Parameterized.Parameters(
    name =
        "Run {index}: isKeyMaterialInternalStorage={0} isDoubleWrapping={1} isWrapLocally={2} isDecryptionDirectMemory={3} isV1={4}")
public static Collection<Object[]> data() {
  // Full cartesian product of the five boolean run flags: 2^5 = 32 parameter vectors,
  // enumerated false-before-true at every level (same order as before).
  final boolean[] flags = {false, true};
  final Collection<Object[]> parameterVectors = new ArrayList<>(32);
  for (boolean internalStorage : flags) {
    for (boolean doubleWrap : flags) {
      for (boolean localWrap : flags) {
        for (boolean directMemory : flags) {
          for (boolean v1 : flags) {
            parameterVectors.add(
                new Object[] {internalStorage, doubleWrap, localWrap, directMemory, v1});
          }
        }
      }
    }
  }
  return parameterVectors;
}
@Parameterized.Parameter // first data value (0) is default
public boolean isKeyMaterialInternalStorage;
@Parameterized.Parameter(value = 1)
public boolean isDoubleWrapping;
@Parameterized.Parameter(value = 2)
public boolean isWrapLocally;
@Parameterized.Parameter(value = 3)
public boolean isDecryptionDirectMemory;
@Parameterized.Parameter(value = 4)
public boolean isV1;
private static final Logger LOG = LoggerFactory.getLogger(TestPropertiesDrivenEncryption.class);
@Rule
public TemporaryFolder temporaryFolder = new TemporaryFolder();
@Rule
public final ErrorCollector errorCollector = new ErrorCollector();
private static final Base64.Encoder encoder = Base64.getEncoder();
private static final String FOOTER_MASTER_KEY =
encoder.encodeToString("0123456789012345".getBytes(StandardCharsets.UTF_8));
private static final String[] COLUMN_MASTER_KEYS = {
encoder.encodeToString("1234567890123450".getBytes(StandardCharsets.UTF_8)),
encoder.encodeToString("1234567890123451".getBytes(StandardCharsets.UTF_8)),
encoder.encodeToString("1234567890123452".getBytes(StandardCharsets.UTF_8)),
encoder.encodeToString("1234567890123453".getBytes(StandardCharsets.UTF_8)),
encoder.encodeToString("1234567890123454".getBytes(StandardCharsets.UTF_8)),
encoder.encodeToString("1234567890123455".getBytes(StandardCharsets.UTF_8))
};
private static final String UNIFORM_MASTER_KEY =
encoder.encodeToString("0123456789012346".getBytes(StandardCharsets.UTF_8));
private static final String[] COLUMN_MASTER_KEY_IDS = {"kc1", "kc2", "kc3", "kc4", "kc5", "kc6"};
private static final String FOOTER_MASTER_KEY_ID = "kf";
private static final String UNIFORM_MASTER_KEY_ID = "ku";
private static final String KEY_LIST = new StringBuilder()
.append(COLUMN_MASTER_KEY_IDS[0])
.append(": ")
.append(COLUMN_MASTER_KEYS[0])
.append(", ")
.append(COLUMN_MASTER_KEY_IDS[1])
.append(": ")
.append(COLUMN_MASTER_KEYS[1])
.append(", ")
.append(COLUMN_MASTER_KEY_IDS[2])
.append(": ")
.append(COLUMN_MASTER_KEYS[2])
.append(", ")
.append(COLUMN_MASTER_KEY_IDS[3])
.append(": ")
.append(COLUMN_MASTER_KEYS[3])
.append(", ")
.append(COLUMN_MASTER_KEY_IDS[4])
.append(": ")
.append(COLUMN_MASTER_KEYS[4])
.append(", ")
.append(COLUMN_MASTER_KEY_IDS[5])
.append(": ")
.append(COLUMN_MASTER_KEYS[5])
.append(", ")
.append(UNIFORM_MASTER_KEY_ID)
.append(": ")
.append(UNIFORM_MASTER_KEY)
.append(", ")
.append(FOOTER_MASTER_KEY_ID)
.append(": ")
.append(FOOTER_MASTER_KEY)
.toString();
private static final String NEW_FOOTER_MASTER_KEY =
encoder.encodeToString("9123456789012345".getBytes(StandardCharsets.UTF_8));
private static final String[] NEW_COLUMN_MASTER_KEYS = {
encoder.encodeToString("9234567890123450".getBytes(StandardCharsets.UTF_8)),
encoder.encodeToString("9234567890123451".getBytes(StandardCharsets.UTF_8)),
encoder.encodeToString("9234567890123452".getBytes(StandardCharsets.UTF_8)),
encoder.encodeToString("9234567890123453".getBytes(StandardCharsets.UTF_8)),
encoder.encodeToString("9234567890123454".getBytes(StandardCharsets.UTF_8)),
encoder.encodeToString("9234567890123455".getBytes(StandardCharsets.UTF_8))
};
private static final String NEW_UNIFORM_MASTER_KEY =
encoder.encodeToString("9123456789012346".getBytes(StandardCharsets.UTF_8));
private static final String NEW_KEY_LIST = new StringBuilder()
.append(COLUMN_MASTER_KEY_IDS[0])
.append(": ")
.append(NEW_COLUMN_MASTER_KEYS[0])
.append(", ")
.append(COLUMN_MASTER_KEY_IDS[1])
.append(": ")
.append(NEW_COLUMN_MASTER_KEYS[1])
.append(", ")
.append(COLUMN_MASTER_KEY_IDS[2])
.append(": ")
.append(NEW_COLUMN_MASTER_KEYS[2])
.append(", ")
.append(COLUMN_MASTER_KEY_IDS[3])
.append(": ")
.append(NEW_COLUMN_MASTER_KEYS[3])
.append(", ")
.append(COLUMN_MASTER_KEY_IDS[4])
.append(": ")
.append(NEW_COLUMN_MASTER_KEYS[4])
.append(", ")
.append(COLUMN_MASTER_KEY_IDS[5])
.append(": ")
.append(NEW_COLUMN_MASTER_KEYS[5])
.append(", ")
.append(UNIFORM_MASTER_KEY_ID)
.append(": ")
.append(NEW_UNIFORM_MASTER_KEY)
.append(", ")
.append(FOOTER_MASTER_KEY_ID)
.append(": ")
.append(NEW_FOOTER_MASTER_KEY)
.toString();
private static final String COLUMN_KEY_MAPPING = new StringBuilder()
.append(COLUMN_MASTER_KEY_IDS[0])
.append(": ")
.append(SingleRow.DOUBLE_FIELD_NAME)
.append("; ")
.append(COLUMN_MASTER_KEY_IDS[1])
.append(": ")
.append(SingleRow.FLOAT_FIELD_NAME)
.append("; ")
.append(COLUMN_MASTER_KEY_IDS[2])
.append(": ")
.append(SingleRow.BOOLEAN_FIELD_NAME)
.append("; ")
.append(COLUMN_MASTER_KEY_IDS[3])
.append(": ")
.append(SingleRow.INT32_FIELD_NAME)
.append("; ")
.append(COLUMN_MASTER_KEY_IDS[4])
.append(": ")
.append(SingleRow.BINARY_FIELD_NAME)
.append("; ")
.append(COLUMN_MASTER_KEY_IDS[5])
.append(": ")
.append(SingleRow.FIXED_LENGTH_BINARY_FIELD_NAME)
.toString();
private static final int NUM_THREADS = 4;
private static final int WAIT_FOR_WRITE_TO_END_SECONDS = 5;
private static final int WAIT_FOR_READ_TO_END_SECONDS = 5;
private static final boolean plaintextFilesAllowed = true;
// AesCtrDecryptor has a loop to update the cipher in chunks of CHUNK_LENGTH (4K). Use a large
// enough number of rows to ensure that the data generated is greater than the chunk length.
private static final int ROW_COUNT = 50000;
private static final List<SingleRow> DATA = Collections.unmodifiableList(SingleRow.generateRandomData(ROW_COUNT));
/**
 * Write-side encryption configurations. Each constant builds the Hadoop configuration
 * that drives the properties-driven crypto factory for one output-file flavor; a
 * {@code null} return means the file is written in plaintext.
 */
public enum EncryptionConfiguration {
  ENCRYPT_COLUMNS_AND_FOOTER {
    /**
     * Encrypt two columns and the footer, with different master keys.
     */
    public Configuration getHadoopConfiguration(TestPropertiesDrivenEncryption test) {
      Configuration conf = getCryptoProperties(test);
      setColumnAndFooterKeys(conf);
      return conf;
    }
  },
  UNIFORM_ENCRYPTION {
    /**
     * Encrypt all columns and the footer, with same master key.
     */
    public Configuration getHadoopConfiguration(TestPropertiesDrivenEncryption test) {
      Configuration conf = getCryptoProperties(test);
      setUniformKey(conf);
      return conf;
    }
  },
  ENCRYPT_COLUMNS_PLAINTEXT_FOOTER {
    /**
     * Encrypt two columns, with different master keys.
     * Don't encrypt footer.
     * (plaintext footer mode, readable by legacy readers)
     */
    public Configuration getHadoopConfiguration(TestPropertiesDrivenEncryption test) {
      Configuration conf = getCryptoProperties(test);
      setColumnAndFooterKeys(conf);
      conf.setBoolean(PropertiesDrivenCryptoFactory.PLAINTEXT_FOOTER_PROPERTY_NAME, true);
      return conf;
    }
  },
  ENCRYPT_COLUMNS_AND_FOOTER_CTR {
    /**
     * Encrypt two columns and the footer, with different master keys.
     * Use AES_GCM_CTR_V1 algorithm.
     */
    public Configuration getHadoopConfiguration(TestPropertiesDrivenEncryption test) {
      Configuration conf = getCryptoProperties(test);
      setColumnAndFooterKeys(conf);
      conf.set(
          PropertiesDrivenCryptoFactory.ENCRYPTION_ALGORITHM_PROPERTY_NAME,
          ParquetCipher.AES_GCM_CTR_V1.toString());
      return conf;
    }
  },
  COMPLETE_COLUMN_ENCRYPTION {
    /**
     * Encrypt two columns and the footer, with different master keys.
     * Encrypt other columns with the footer master key.
     */
    public Configuration getHadoopConfiguration(TestPropertiesDrivenEncryption test) {
      Configuration conf = getCryptoProperties(test);
      setColumnAndFooterKeys(conf);
      conf.setBoolean(PropertiesDrivenCryptoFactory.COMPLETE_COLUMN_ENCRYPTION_PROPERTY_NAME, true);
      return conf;
    }
  },
  NO_ENCRYPTION {
    /**
     * Do not encrypt anything
     */
    public Configuration getHadoopConfiguration(TestPropertiesDrivenEncryption test) {
      return null;
    }
  };

  /** Builds the per-mode Hadoop configuration; {@code null} means plaintext output. */
  public abstract Configuration getHadoopConfiguration(TestPropertiesDrivenEncryption test);
}
/**
 * Read-side decryption configurations; a {@code null} Hadoop configuration means the
 * file is read without any decryption properties (only plaintext files should succeed).
 */
public enum DecryptionConfiguration {
  DECRYPT_WITH_KEY_RETRIEVER {
    /**
     * Decrypt using key retriever callback that holds the keys
     * of two encrypted columns and the footer key.
     */
    public Configuration getHadoopConfiguration(TestPropertiesDrivenEncryption test) {
      Configuration conf = getCryptoProperties(test);
      return conf;
    }
  },
  NO_DECRYPTION {
    /**
     * Do not decrypt anything.
     */
    public Configuration getHadoopConfiguration(TestPropertiesDrivenEncryption test) {
      return null;
    }
  };

  /** Builds the per-mode Hadoop configuration; {@code null} means no decryption. */
  public abstract Configuration getHadoopConfiguration(TestPropertiesDrivenEncryption test);
}
/**
 * Builds a Hadoop configuration carrying the crypto properties common to every
 * encryption/decryption mode: the crypto factory class, the (mock) KMS client,
 * both key lists, and the parameterized storage/wrapping/buffer flags.
 */
private static Configuration getCryptoProperties(TestPropertiesDrivenEncryption test) {
  final Configuration cryptoConf = new Configuration();
  cryptoConf.set(
      EncryptionPropertiesFactory.CRYPTO_FACTORY_CLASS_PROPERTY_NAME,
      PropertiesDrivenCryptoFactory.class.getName());
  // Local wrapping uses a KMS mock whose wrap/unwrap happens in-process.
  final String kmsClientClass = test.isWrapLocally
      ? LocalWrapInMemoryKMS.class.getName()
      : InMemoryKMS.class.getName();
  cryptoConf.set(KeyToolkit.KMS_CLIENT_CLASS_PROPERTY_NAME, kmsClientClass);
  cryptoConf.set(InMemoryKMS.KEY_LIST_PROPERTY_NAME, KEY_LIST);
  cryptoConf.set(InMemoryKMS.NEW_KEY_LIST_PROPERTY_NAME, NEW_KEY_LIST);
  cryptoConf.set(OFF_HEAP_DECRYPT_BUFFER_ENABLED, String.valueOf(test.isDecryptionDirectMemory));
  cryptoConf.setBoolean(KeyToolkit.KEY_MATERIAL_INTERNAL_PROPERTY_NAME, test.isKeyMaterialInternalStorage);
  cryptoConf.setBoolean(KeyToolkit.DOUBLE_WRAPPING_PROPERTY_NAME, test.isDoubleWrapping);
  return cryptoConf;
}
/**
 * Set configuration properties to encrypt columns and the footer with different master keys.
 * The column-to-key mapping covers the six sensitive columns; the footer gets its own key id.
 */
private static void setColumnAndFooterKeys(Configuration conf) {
  conf.set(PropertiesDrivenCryptoFactory.COLUMN_KEYS_PROPERTY_NAME, COLUMN_KEY_MAPPING);
  conf.set(PropertiesDrivenCryptoFactory.FOOTER_KEY_PROPERTY_NAME, FOOTER_MASTER_KEY_ID);
}
/**
 * Set uniform encryption configuration property: one master key id ({@code ku})
 * protects all columns and the footer.
 */
private static void setUniformKey(Configuration conf) {
  conf.set(PropertiesDrivenCryptoFactory.UNIFORM_KEY_PROPERTY_NAME, UNIFORM_MASTER_KEY_ID);
}
/**
 * End-to-end test entry point: writes one file set per encryption configuration, then
 * reads everything back under every decryption configuration, sharing one thread pool.
 */
@Test
public void testWriteReadEncryptedParquetFiles() throws IOException {
  Path rootPath = new Path(temporaryFolder.getRoot().getPath());
  LOG.info("======== testWriteReadEncryptedParquetFiles {} ========", rootPath.toString());
  // FIX: log all five parameterized flags — the previous message omitted
  // isDecryptionDirectMemory and isV1 even though the @Parameters name includes them.
  LOG.info(
      "Run: isKeyMaterialInternalStorage={} isDoubleWrapping={} isWrapLocally={} isDecryptionDirectMemory={} isV1={}",
      isKeyMaterialInternalStorage,
      isDoubleWrapping,
      isWrapLocally,
      isDecryptionDirectMemory,
      isV1);
  // Start from a clean key-cache state so previous parameterized runs cannot mask key-fetch bugs.
  KeyToolkit.removeCacheEntriesForAllTokens();
  ExecutorService threadPool = Executors.newFixedThreadPool(NUM_THREADS);
  try {
    // Write using various encryption configurations.
    testWriteEncryptedParquetFiles(rootPath, DATA, threadPool);
    // Read using various decryption configurations.
    testReadEncryptedParquetFiles(rootPath, DATA, threadPool);
  } finally {
    threadPool.shutdown();
  }
}
/**
 * Writes NUM_THREADS files per encryption configuration into a per-configuration
 * folder (recreated from scratch), running the writers concurrently on the pool.
 *
 * @param root       base output directory
 * @param data       rows to write into every file
 * @param threadPool shared executor for the concurrent writers
 */
private void testWriteEncryptedParquetFiles(Path root, List<SingleRow> data, ExecutorService threadPool)
    throws IOException {
  EncryptionConfiguration[] encryptionConfigurations = EncryptionConfiguration.values();
  for (EncryptionConfiguration encryptionConfiguration : encryptionConfigurations) {
    Path encryptionConfigurationFolderPath = new Path(root, encryptionConfiguration.name());
    Configuration conf = new Configuration();
    FileSystem fs = encryptionConfigurationFolderPath.getFileSystem(conf);
    // Recreate the folder so stale files from a previous run cannot leak into this one.
    if (fs.exists(encryptionConfigurationFolderPath)) {
      fs.delete(encryptionConfigurationFolderPath, true);
    }
    fs.mkdirs(encryptionConfigurationFolderPath);
    KeyToolkit.removeCacheEntriesForAllTokens();
    CountDownLatch latch = new CountDownLatch(NUM_THREADS);
    for (int i = 0; i < NUM_THREADS; ++i) {
      final int threadNumber = i;
      threadPool.execute(() -> {
        writeEncryptedParquetFile(
            encryptionConfigurationFolderPath, data, encryptionConfiguration, threadNumber);
        latch.countDown();
      });
    }
    try {
      // BUGFIX: the await() result used to be ignored, so a timed-out write pass went
      // unnoticed and the read phase ran against incomplete files. Record it explicitly.
      if (!latch.await(WAIT_FOR_WRITE_TO_END_SECONDS, TimeUnit.SECONDS)) {
        addErrorToErrorCollectorAndLog(
            "Timed out waiting for files to be written", encryptionConfiguration, null);
      }
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
    }
  }
}
/**
 * Writes one Parquet file for the given encryption configuration and thread number.
 * Sizes are chosen so several pages and several row groups are produced. Any failure
 * is routed to the error collector rather than thrown, so sibling writers keep running.
 */
private void writeEncryptedParquetFile(
    Path root, List<SingleRow> data, EncryptionConfiguration encryptionConfiguration, int threadNumber) {
  MessageType schema = SingleRow.getSchema();
  SimpleGroupFactory f = new SimpleGroupFactory(schema);
  int pageSize = data.size() / 10; // Ensure that several pages will be created
  int rowGroupSize = pageSize * 6 * 5; // Ensure that there are more row-groups created
  // FIX: getFileName() already resolves the name against 'root'; the previous
  // new Path(root, getFileName(root, ...)) wrapped the root a second time and only
  // worked because the temporary root is an absolute path.
  Path file = getFileName(root, encryptionConfiguration, threadNumber);
  LOG.info("\nWrite {}", file);
  Configuration conf = encryptionConfiguration.getHadoopConfiguration(this);
  FileEncryptionProperties fileEncryptionProperties = null;
  try {
    if (null == conf) {
      // NO_ENCRYPTION: plain Hadoop configuration, no encryption properties.
      conf = new Configuration();
    } else {
      EncryptionPropertiesFactory cryptoFactory = EncryptionPropertiesFactory.loadFactory(conf);
      fileEncryptionProperties = cryptoFactory.getFileEncryptionProperties(conf, file, null);
    }
  } catch (Exception e) {
    addErrorToErrorCollectorAndLog("Failed writing " + file.toString(), e, encryptionConfiguration, null);
    return;
  }
  WriterVersion writerVersion = this.isV1 ? WriterVersion.PARQUET_1_0 : WriterVersion.PARQUET_2_0;
  try (ParquetWriter<Group> writer = ExampleParquetWriter.builder(file)
      .withConf(conf)
      .withWriteMode(OVERWRITE)
      .withType(schema)
      .withPageSize(pageSize)
      .withRowGroupSize(rowGroupSize)
      .withEncryption(fileEncryptionProperties)
      .withWriterVersion(writerVersion)
      .build()) {
    for (SingleRow singleRow : data) {
      writer.write(f.newGroup()
          .append(SingleRow.BOOLEAN_FIELD_NAME, singleRow.boolean_field)
          .append(SingleRow.INT32_FIELD_NAME, singleRow.int32_field)
          .append(SingleRow.FLOAT_FIELD_NAME, singleRow.float_field)
          .append(SingleRow.DOUBLE_FIELD_NAME, singleRow.double_field)
          .append(SingleRow.BINARY_FIELD_NAME, Binary.fromConstantByteArray(singleRow.ba_field))
          .append(
              SingleRow.FIXED_LENGTH_BINARY_FIELD_NAME,
              Binary.fromConstantByteArray(singleRow.flba_field))
          .append(SingleRow.PLAINTEXT_INT32_FIELD_NAME, singleRow.plaintext_int32_field));
    }
  } catch (Exception e) {
    addErrorToErrorCollectorAndLog("Failed writing " + file.toString(), e, encryptionConfiguration, null);
  }
}
/**
 * Builds the per-thread output path {@code <root>/<CONFIG>_<thread><suffix>}, where the
 * suffix is ".parquet" for plaintext output and ".parquet.encrypted" otherwise.
 */
private Path getFileName(Path root, EncryptionConfiguration encryptionConfiguration, int threadNumber) {
  final String suffix;
  if (encryptionConfiguration == EncryptionConfiguration.NO_ENCRYPTION) {
    suffix = ".parquet";
  } else {
    suffix = ".parquet.encrypted";
  }
  final String baseName = encryptionConfiguration.toString() + "_" + threadNumber;
  return new Path(root, baseName + suffix);
}
/**
 * Reads every written file twice: once with the original master keys, then — unless
 * local wrapping is active, which does not support rotation — rotates the master keys
 * in every encrypted folder and reads everything again with the new keys.
 */
private void testReadEncryptedParquetFiles(Path root, List<SingleRow> data, ExecutorService threadPool)
    throws IOException {
  readFilesMultithreaded(root, data, threadPool, false /*keysRotated*/);
  if (isWrapLocally) {
    return; // key rotation is not supported with local key wrapping
  }
  LOG.info("--> Start master key rotation");
  Configuration hadoopConfigForRotation =
      EncryptionConfiguration.ENCRYPT_COLUMNS_AND_FOOTER.getHadoopConfiguration(this);
  // NOTE(review): getCryptoProperties() already sets NEW_KEY_LIST_PROPERTY_NAME to the
  // same value, so this set appears redundant — kept as-is; confirm before removing.
  hadoopConfigForRotation.set(InMemoryKMS.NEW_KEY_LIST_PROPERTY_NAME, NEW_KEY_LIST);
  InMemoryKMS.startKeyRotation(hadoopConfigForRotation);
  EncryptionConfiguration[] encryptionConfigurations = EncryptionConfiguration.values();
  for (EncryptionConfiguration encryptionConfiguration : encryptionConfigurations) {
    if (EncryptionConfiguration.NO_ENCRYPTION == encryptionConfiguration) {
      continue; // no rotation of plaintext files
    }
    Path encryptionConfigurationFolderPath = new Path(root, encryptionConfiguration.name());
    try {
      LOG.info("Rotate master keys in folder: " + encryptionConfigurationFolderPath.toString());
      KeyToolkit.rotateMasterKeys(encryptionConfigurationFolderPath.toString(), hadoopConfigForRotation);
    } catch (UnsupportedOperationException e) {
      // With internal key-material storage there is no external key-material file to
      // rotate, so this failure is expected in that mode.
      if (isKeyMaterialInternalStorage || isWrapLocally) {
        LOG.info("Key material file not found, as expected");
      } else {
        errorCollector.addError(e);
      }
      return; // No use in continuing reading if rotation wasn't successful
    } catch (Exception e) {
      errorCollector.addError(e);
      return; // No use in continuing reading if rotation wasn't successful
    }
  }
  InMemoryKMS.finishKeyRotation();
  LOG.info("--> Finish master key rotation");
  LOG.info("--> Read files again with new keys");
  readFilesMultithreaded(root, data, threadPool, true /*keysRotated*/);
}
/**
 * Reads every (encryption configuration x file) combination under every decryption
 * configuration, NUM_THREADS files at a time on the shared pool.
 *
 * @param keysRotated whether the master keys were rotated before this pass (readers
 *                    must then use the new key list)
 */
private void readFilesMultithreaded(
    Path root, List<SingleRow> data, ExecutorService threadPool, boolean keysRotated) {
  DecryptionConfiguration[] decryptionConfigurations = DecryptionConfiguration.values();
  for (DecryptionConfiguration decryptionConfiguration : decryptionConfigurations) {
    LOG.info("\n\n");
    LOG.info("==> Decryption configuration {}\n", decryptionConfiguration);
    Configuration hadoopConfig = decryptionConfiguration.getHadoopConfiguration(this);
    if (null != hadoopConfig) {
      // Force fresh key fetches for this decryption pass.
      KeyToolkit.removeCacheEntriesForAllTokens();
    }
    EncryptionConfiguration[] encryptionConfigurations = EncryptionConfiguration.values();
    for (EncryptionConfiguration encryptionConfiguration : encryptionConfigurations) {
      Path encryptionConfigurationFolderPath = new Path(root, encryptionConfiguration.name());
      CountDownLatch latch = new CountDownLatch(NUM_THREADS);
      for (int i = 0; i < NUM_THREADS; ++i) {
        final int threadNumber = i;
        threadPool.execute(() -> {
          Path file =
              getFileName(encryptionConfigurationFolderPath, encryptionConfiguration, threadNumber);
          LOG.info("--> Read file {} {}", file.toString(), encryptionConfiguration);
          readFileAndCheckResult(
              hadoopConfig,
              encryptionConfiguration,
              decryptionConfiguration,
              data,
              file,
              keysRotated);
          latch.countDown();
        });
      }
      try {
        // BUGFIX: the await() result used to be ignored, so a timed-out read pass went
        // unnoticed. Record an explicit error instead.
        if (!latch.await(WAIT_FOR_READ_TO_END_SECONDS, TimeUnit.SECONDS)) {
          addErrorToErrorCollectorAndLog(
              "Timed out waiting for files to be read", encryptionConfiguration, decryptionConfiguration);
        }
      } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
      }
    }
  }
}
/**
 * Reads one file with the given decryption setup and verifies its content row-by-row
 * against the expected data. Files with an encrypted footer are additionally checked
 * to start with the encrypted-footer magic (EF_MAGIC_STR). Exceptions are routed to
 * checkResult(), which decides whether they were expected for this combination.
 */
private void readFileAndCheckResult(
    Configuration hadoopConfig,
    EncryptionConfiguration encryptionConfiguration,
    DecryptionConfiguration decryptionConfiguration,
    List<SingleRow> data,
    Path file,
    boolean keysRotated) {
  FileDecryptionProperties fileDecryptionProperties = null;
  if (null == hadoopConfig) {
    // NO_DECRYPTION: plain configuration without decryption properties.
    hadoopConfig = new Configuration();
  } else {
    DecryptionPropertiesFactory cryptoFactory = DecryptionPropertiesFactory.loadFactory(hadoopConfig);
    fileDecryptionProperties = cryptoFactory.getFileDecryptionProperties(hadoopConfig, file);
  }
  // Set schema to only point to the non-encrypted columns
  if ((decryptionConfiguration == DecryptionConfiguration.NO_DECRYPTION)
      && (encryptionConfiguration == EncryptionConfiguration.ENCRYPT_COLUMNS_PLAINTEXT_FOOTER)) {
    hadoopConfig.set(
        "parquet.read.schema",
        Types.buildMessage()
            .optional(INT32)
            .named(SingleRow.PLAINTEXT_INT32_FIELD_NAME)
            .named("FormatTestObject")
            .toString());
  }
  // Any mode with an encrypted footer must produce a file starting with EFMAGIC.
  if ((encryptionConfiguration != EncryptionConfiguration.NO_ENCRYPTION)
      && (encryptionConfiguration != EncryptionConfiguration.ENCRYPT_COLUMNS_PLAINTEXT_FOOTER)) {
    byte[] magic = new byte[MAGIC.length];
    try (InputStream is = new FileInputStream(file.toString())) {
      if (is.read(magic) != magic.length) {
        throw new RuntimeException("ERROR");
      }
      if (!Arrays.equals(EFMAGIC, magic)) {
        addErrorToErrorCollectorAndLog(
            "File doesn't start with " + EF_MAGIC_STR,
            encryptionConfiguration,
            decryptionConfiguration);
      }
    } catch (IOException e) {
      addErrorToErrorCollectorAndLog(
          "Failed to read magic string at the beginning of file",
          e,
          encryptionConfiguration,
          decryptionConfiguration);
    }
  }
  // After rotation, configurations that carry a key list must switch to the new one.
  if (keysRotated && (null != hadoopConfig.get(InMemoryKMS.KEY_LIST_PROPERTY_NAME))) {
    hadoopConfig.set(InMemoryKMS.KEY_LIST_PROPERTY_NAME, NEW_KEY_LIST);
  }
  int rowNum = 0;
  // The tracking allocator fails the test on buffer leaks; direct vs heap follows the
  // isDecryptionDirectMemory run parameter.
  try (TrackingByteBufferAllocator allocator = TrackingByteBufferAllocator.wrap(
          this.isDecryptionDirectMemory
              ? new DirectByteBufferAllocator()
              : new HeapByteBufferAllocator());
      ParquetReader<Group> reader = ParquetReader.builder(new GroupReadSupport(), file)
          .withConf(hadoopConfig)
          .withAllocator(allocator)
          .withDecryption(fileDecryptionProperties)
          .build()) {
    for (Group group = reader.read(); group != null; group = reader.read()) {
      SingleRow rowExpected = data.get(rowNum++);
      // plaintext columns
      if (rowExpected.plaintext_int32_field != group.getInteger(SingleRow.PLAINTEXT_INT32_FIELD_NAME, 0)) {
        addErrorToErrorCollectorAndLog("Wrong int", encryptionConfiguration, decryptionConfiguration);
      }
      // encrypted columns
      if (decryptionConfiguration != DecryptionConfiguration.NO_DECRYPTION) {
        if (rowExpected.boolean_field != group.getBoolean(SingleRow.BOOLEAN_FIELD_NAME, 0)) {
          addErrorToErrorCollectorAndLog("Wrong bool", encryptionConfiguration, decryptionConfiguration);
        }
        if (rowExpected.int32_field != group.getInteger(SingleRow.INT32_FIELD_NAME, 0)) {
          addErrorToErrorCollectorAndLog("Wrong int", encryptionConfiguration, decryptionConfiguration);
        }
        if (rowExpected.float_field != group.getFloat(SingleRow.FLOAT_FIELD_NAME, 0)) {
          addErrorToErrorCollectorAndLog("Wrong float", encryptionConfiguration, decryptionConfiguration);
        }
        if (rowExpected.double_field != group.getDouble(SingleRow.DOUBLE_FIELD_NAME, 0)) {
          addErrorToErrorCollectorAndLog(
              "Wrong double", encryptionConfiguration, decryptionConfiguration);
        }
        if ((null != rowExpected.ba_field)
            && !Arrays.equals(
                rowExpected.ba_field,
                group.getBinary(SingleRow.BINARY_FIELD_NAME, 0)
                    .getBytes())) {
          addErrorToErrorCollectorAndLog(
              "Wrong byte array", encryptionConfiguration, decryptionConfiguration);
        }
        if (!Arrays.equals(
            rowExpected.flba_field,
            group.getBinary(SingleRow.FIXED_LENGTH_BINARY_FIELD_NAME, 0)
                .getBytes())) {
          addErrorToErrorCollectorAndLog(
              "Wrong fixed-length byte array", encryptionConfiguration, decryptionConfiguration);
        }
      }
    }
  } catch (Exception e) {
    // Exceptions can be expected (e.g. reading encrypted data without keys);
    // checkResult() decides whether this one is a failure.
    checkResult(file.getName(), decryptionConfiguration, e);
  }
  hadoopConfig.unset("parquet.read.schema");
}
/**
 * Check that the decryption result is as expected.
 * An exception is "expected" when a no-decryption reader hits an encrypted-footer file
 * (or, were plaintext files disallowed, when a decryptor hits a plaintext file);
 * anything else is recorded as a test failure.
 */
private void checkResult(String file, DecryptionConfiguration decryptionConfiguration, Exception exception) {
  String errorMessage = exception.getMessage();
  String exceptionMsg = (null == errorMessage ? exception.getClass().getName() : errorMessage);
  // Extract encryptionConfigurationNumber from the parquet file name.
  EncryptionConfiguration encryptionConfiguration = getEncryptionConfigurationFromFilename(file);
  if (!plaintextFilesAllowed) {
    // Encryption_configuration null encryptor, so parquet is plaintext.
    // An exception is expected to be thrown if the file is being decrypted.
    // NOTE(review): getEncryptionConfigurationFromFilename() returns null for plaintext
    // ".parquet" names, so this comparison can never be true. Dead under the current
    // hard-coded plaintextFilesAllowed=true — verify if that flag is ever flipped.
    if (encryptionConfiguration == EncryptionConfiguration.NO_ENCRYPTION) {
      if (decryptionConfiguration == DecryptionConfiguration.DECRYPT_WITH_KEY_RETRIEVER) {
        if (!exceptionMsg.endsWith("Applying decryptor on plaintext file")) {
          addErrorToErrorCollectorAndLog(
              "Expecting exception Applying decryptor on plaintext file",
              exceptionMsg,
              encryptionConfiguration,
              decryptionConfiguration);
        } else {
          LOG.info("Exception as expected: " + exceptionMsg);
        }
        return;
      }
    }
  }
  // Decryption configuration is null, so only plaintext file can be read. An exception is expected to
  // be thrown if the file is encrypted.
  if (decryptionConfiguration == DecryptionConfiguration.NO_DECRYPTION) {
    if ((encryptionConfiguration != EncryptionConfiguration.NO_ENCRYPTION
        && encryptionConfiguration != EncryptionConfiguration.ENCRYPT_COLUMNS_PLAINTEXT_FOOTER)) {
      if (!exceptionMsg.endsWith("No encryption key list") && !exceptionMsg.endsWith("No keys available")) {
        addErrorToErrorCollectorAndLog(
            "Expecting No keys available exception",
            exceptionMsg,
            encryptionConfiguration,
            decryptionConfiguration);
      } else {
        LOG.info("Exception as expected: " + exceptionMsg);
      }
      return;
    }
  }
  // Anything reaching this point was not an anticipated failure mode.
  exception.printStackTrace();
  addErrorToErrorCollectorAndLog(
      "Didn't expect an exception", exceptionMsg, encryptionConfiguration, decryptionConfiguration);
}
/**
 * Derives the {@link EncryptionConfiguration} that produced a file from its name
 * (pattern {@code <CONFIG>_<thread>.parquet.encrypted}). Returns {@code null} for
 * plaintext files; an unrecognized prefix is recorded as a test error.
 */
private EncryptionConfiguration getEncryptionConfigurationFromFilename(String file) {
  if (!file.endsWith(".parquet.encrypted")) {
    return null;
  }
  // BUGFIX: the dots in the regex were unescaped (each matched ANY character), and the
  // statement was followed by a stray empty ';'.
  String fileNamePrefix = file.replaceFirst("(.*)_[0-9]+\\.parquet\\.encrypted", "$1");
  try {
    return EncryptionConfiguration.valueOf(fileNamePrefix.toUpperCase());
  } catch (IllegalArgumentException e) {
    LOG.error("File name doesn't match any known encryption configuration: " + file);
    synchronized (errorCollector) {
      errorCollector.addError(e);
    }
    return null;
  }
}
/**
 * Records a mismatch between an expected and an actual exception message as a test
 * error and logs it. Synchronized on the collector because reader/writer tasks run
 * concurrently on the shared pool.
 */
private void addErrorToErrorCollectorAndLog(
    String errorMessage,
    String exceptionMessage,
    EncryptionConfiguration encryptionConfiguration,
    DecryptionConfiguration decryptionConfiguration) {
  String fullErrorMessage = String.format(
      "%s - %s Error: %s, but got [%s]",
      encryptionConfiguration, decryptionConfiguration, errorMessage, exceptionMessage);
  synchronized (errorCollector) {
    errorCollector.addError(new Throwable(fullErrorMessage));
  }
  LOG.error(fullErrorMessage);
}
/**
 * Records a test error tagged with the encryption/decryption configuration pair and
 * logs it. Synchronized on the collector because tasks run on multiple threads.
 */
private void addErrorToErrorCollectorAndLog(
    String errorMessage,
    EncryptionConfiguration encryptionConfiguration,
    DecryptionConfiguration decryptionConfiguration) {
  String fullErrorMessage =
      String.format("%s - %s Error: %s", encryptionConfiguration, decryptionConfiguration, errorMessage);
  synchronized (errorCollector) {
    errorCollector.addError(new Throwable(fullErrorMessage));
  }
  LOG.error(fullErrorMessage);
}
/**
 * Records a caught exception as a test error: appends the exception class and message
 * to the context message, delegates to the two-configuration overload, and dumps the
 * stack trace for debugging.
 */
private void addErrorToErrorCollectorAndLog(
    String errorMessage,
    Throwable exception,
    EncryptionConfiguration encryptionConfiguration,
    DecryptionConfiguration decryptionConfiguration) {
  // Same text as String.format("%s %s %s", ...): a null message renders as "null".
  final String detailedMessage =
      errorMessage + " " + exception.getClass().getName() + " " + exception.getMessage();
  addErrorToErrorCollectorAndLog(detailedMessage, encryptionConfiguration, decryptionConfiguration);
  exception.printStackTrace();
}
}
|
googleapis/google-cloud-java | 34,960 | java-area120-tables/proto-google-area120-tables-v1alpha1/src/main/java/com/google/area120/tables/v1alpha1/Row.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/area120/tables/v1alpha1/tables.proto
// Protobuf Java Version: 3.25.8
package com.google.area120.tables.v1alpha1;
/**
*
*
* <pre>
* A single row in a table.
* </pre>
*
* Protobuf type {@code google.area120.tables.v1alpha1.Row}
*/
public final class Row extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.area120.tables.v1alpha1.Row)
RowOrBuilder {
private static final long serialVersionUID = 0L;
// Use Row.newBuilder() to construct.
private Row(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private Row() {
name_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new Row();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.area120.tables.v1alpha1.TablesProto
.internal_static_google_area120_tables_v1alpha1_Row_descriptor;
}
@SuppressWarnings({"rawtypes"})
@java.lang.Override
protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection(
int number) {
switch (number) {
case 2:
return internalGetValues();
default:
throw new RuntimeException("Invalid map field number: " + number);
}
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.area120.tables.v1alpha1.TablesProto
.internal_static_google_area120_tables_v1alpha1_Row_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.area120.tables.v1alpha1.Row.class,
com.google.area120.tables.v1alpha1.Row.Builder.class);
}
public static final int NAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object name_ = "";
/**
*
*
* <pre>
* The resource name of the row.
* Row names have the form `tables/{table}/rows/{row}`.
* The name is ignored when creating a row.
* </pre>
*
* <code>string name = 1;</code>
*
* @return The name.
*/
@java.lang.Override
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
}
}
/**
*
*
* <pre>
* The resource name of the row.
* Row names have the form `tables/{table}/rows/{row}`.
* The name is ignored when creating a row.
* </pre>
*
* <code>string name = 1;</code>
*
* @return The bytes for name.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
  public static final int VALUES_FIELD_NUMBER = 2;
  // Lazy holder for the prototype MapEntry (string key -> google.protobuf.Value)
  // used to serialize and parse entries of the "values" map field.
  private static final class ValuesDefaultEntryHolder {
    static final com.google.protobuf.MapEntry<java.lang.String, com.google.protobuf.Value>
        defaultEntry =
            com.google.protobuf.MapEntry
                .<java.lang.String, com.google.protobuf.Value>newDefaultInstance(
                    com.google.area120.tables.v1alpha1.TablesProto
                        .internal_static_google_area120_tables_v1alpha1_Row_ValuesEntry_descriptor,
                    com.google.protobuf.WireFormat.FieldType.STRING,
                    "",
                    com.google.protobuf.WireFormat.FieldType.MESSAGE,
                    com.google.protobuf.Value.getDefaultInstance());
  }
  // Null until the message actually carries map data; accessors treat null as empty.
  @SuppressWarnings("serial")
  private com.google.protobuf.MapField<java.lang.String, com.google.protobuf.Value> values_;
  private com.google.protobuf.MapField<java.lang.String, com.google.protobuf.Value>
      internalGetValues() {
    if (values_ == null) {
      return com.google.protobuf.MapField.emptyMapField(ValuesDefaultEntryHolder.defaultEntry);
    }
    return values_;
  }
  /** Returns the number of entries in the values map. */
  public int getValuesCount() {
    return internalGetValues().getMap().size();
  }
/**
*
*
* <pre>
* The values of the row. This is a map of column key to value.
* Key is user entered name(default) or the internal column id based on
* the view in the request.
* </pre>
*
* <code>map<string, .google.protobuf.Value> values = 2;</code>
*/
@java.lang.Override
public boolean containsValues(java.lang.String key) {
if (key == null) {
throw new NullPointerException("map key");
}
return internalGetValues().getMap().containsKey(key);
}
  /** Use {@link #getValuesMap()} instead. */
  @java.lang.Override
  @java.lang.Deprecated
  public java.util.Map<java.lang.String, com.google.protobuf.Value> getValues() {
    return getValuesMap();
  }
  /**
   * Returns an immutable view of the values map.
   *
   * <pre>
   * The values of the row. This is a map of column key to value.
   * Key is user entered name(default) or the internal column id based on
   * the view in the request.
   * </pre>
   *
   * <code>map<string, .google.protobuf.Value> values = 2;</code>
   */
  @java.lang.Override
  public java.util.Map<java.lang.String, com.google.protobuf.Value> getValuesMap() {
    return internalGetValues().getMap();
  }
/**
*
*
* <pre>
* The values of the row. This is a map of column key to value.
* Key is user entered name(default) or the internal column id based on
* the view in the request.
* </pre>
*
* <code>map<string, .google.protobuf.Value> values = 2;</code>
*/
@java.lang.Override
public /* nullable */ com.google.protobuf.Value getValuesOrDefault(
java.lang.String key,
/* nullable */
com.google.protobuf.Value defaultValue) {
if (key == null) {
throw new NullPointerException("map key");
}
java.util.Map<java.lang.String, com.google.protobuf.Value> map = internalGetValues().getMap();
return map.containsKey(key) ? map.get(key) : defaultValue;
}
/**
*
*
* <pre>
* The values of the row. This is a map of column key to value.
* Key is user entered name(default) or the internal column id based on
* the view in the request.
* </pre>
*
* <code>map<string, .google.protobuf.Value> values = 2;</code>
*/
@java.lang.Override
public com.google.protobuf.Value getValuesOrThrow(java.lang.String key) {
if (key == null) {
throw new NullPointerException("map key");
}
java.util.Map<java.lang.String, com.google.protobuf.Value> map = internalGetValues().getMap();
if (!map.containsKey(key)) {
throw new java.lang.IllegalArgumentException();
}
return map.get(key);
}
  // Memoized isInitialized() result: -1 = not yet computed, 1 = true, 0 = false.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // This message has no required fields, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Field 1 (name): proto3 elides fields at their default, so only write when non-empty.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
    }
    // Field 2 (values): serialized as repeated MapEntry messages, sorted by string key.
    com.google.protobuf.GeneratedMessageV3.serializeStringMapTo(
        output, internalGetValues(), ValuesDefaultEntryHolder.defaultEntry, 2);
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size; // cached from a previous call
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
    }
    // Each map entry is measured as a standalone MapEntry message under field number 2.
    for (java.util.Map.Entry<java.lang.String, com.google.protobuf.Value> entry :
        internalGetValues().getMap().entrySet()) {
      com.google.protobuf.MapEntry<java.lang.String, com.google.protobuf.Value> values__ =
          ValuesDefaultEntryHolder.defaultEntry
              .newBuilderForType()
              .setKey(entry.getKey())
              .setValue(entry.getValue())
              .build();
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, values__);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.area120.tables.v1alpha1.Row)) {
return super.equals(obj);
}
com.google.area120.tables.v1alpha1.Row other = (com.google.area120.tables.v1alpha1.Row) obj;
if (!getName().equals(other.getName())) return false;
if (!internalGetValues().equals(other.internalGetValues())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    // Skip the map when empty so an absent map and an empty map hash identically.
    if (!internalGetValues().getMap().isEmpty()) {
      hash = (37 * hash) + VALUES_FIELD_NUMBER;
      hash = (53 * hash) + internalGetValues().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // ---------------------------------------------------------------------------
  // Static parse entry points. All overloads delegate to PARSER; the stream
  // variants wrap IOExceptions via the GeneratedMessageV3 helpers, and the
  // "Delimited" forms read a varint length prefix before the message bytes.
  // ---------------------------------------------------------------------------
  public static com.google.area120.tables.v1alpha1.Row parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.area120.tables.v1alpha1.Row parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.area120.tables.v1alpha1.Row parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.area120.tables.v1alpha1.Row parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.area120.tables.v1alpha1.Row parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.area120.tables.v1alpha1.Row parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.area120.tables.v1alpha1.Row parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.area120.tables.v1alpha1.Row parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.area120.tables.v1alpha1.Row parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.area120.tables.v1alpha1.Row parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.area120.tables.v1alpha1.Row parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.area120.tables.v1alpha1.Row parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  /** Creates a fresh builder initialized to default field values. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  /** Creates a builder pre-populated with {@code prototype}'s fields. */
  public static Builder newBuilder(com.google.area120.tables.v1alpha1.Row prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Avoid a needless merge when called on the default instance itself.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * Builder for {@code google.area120.tables.v1alpha1.Row}.
   *
   * <pre>
   * A single row in a table.
   * </pre>
   *
   * Protobuf type {@code google.area120.tables.v1alpha1.Row}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.area120.tables.v1alpha1.Row)
      com.google.area120.tables.v1alpha1.RowOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.area120.tables.v1alpha1.TablesProto
          .internal_static_google_area120_tables_v1alpha1_Row_descriptor;
    }
    // Reflection hook: field 2 ("values") is the only map field on this message.
    @SuppressWarnings({"rawtypes"})
    protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection(
        int number) {
      switch (number) {
        case 2:
          return internalGetValues();
        default:
          throw new RuntimeException("Invalid map field number: " + number);
      }
    }
    // Mutable variant of the hook above; flips the presence bit and marks the builder changed.
    @SuppressWarnings({"rawtypes"})
    protected com.google.protobuf.MapFieldReflectionAccessor internalGetMutableMapFieldReflection(
        int number) {
      switch (number) {
        case 2:
          return internalGetMutableValues();
        default:
          throw new RuntimeException("Invalid map field number: " + number);
      }
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.area120.tables.v1alpha1.TablesProto
          .internal_static_google_area120_tables_v1alpha1_Row_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.area120.tables.v1alpha1.Row.class,
              com.google.area120.tables.v1alpha1.Row.Builder.class);
    }
    // Construct using com.google.area120.tables.v1alpha1.Row.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      name_ = "";
      internalGetMutableValues().clear();
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.area120.tables.v1alpha1.TablesProto
          .internal_static_google_area120_tables_v1alpha1_Row_descriptor;
    }
    @java.lang.Override
    public com.google.area120.tables.v1alpha1.Row getDefaultInstanceForType() {
      return com.google.area120.tables.v1alpha1.Row.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.area120.tables.v1alpha1.Row build() {
      com.google.area120.tables.v1alpha1.Row result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.area120.tables.v1alpha1.Row buildPartial() {
      com.google.area120.tables.v1alpha1.Row result =
          new com.google.area120.tables.v1alpha1.Row(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies only the fields whose presence bits are set into the new message.
    private void buildPartial0(com.google.area120.tables.v1alpha1.Row result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.name_ = name_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.values_ = internalGetValues().build(ValuesDefaultEntryHolder.defaultEntry);
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.area120.tables.v1alpha1.Row) {
        return mergeFrom((com.google.area120.tables.v1alpha1.Row) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Field-wise merge: a non-empty name replaces ours; map entries merge key-by-key.
    public Builder mergeFrom(com.google.area120.tables.v1alpha1.Row other) {
      if (other == com.google.area120.tables.v1alpha1.Row.getDefaultInstance()) return this;
      if (!other.getName().isEmpty()) {
        name_ = other.name_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      internalGetMutableValues().mergeFrom(other.internalGetValues());
      bitField0_ |= 0x00000002;
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format parse loop: tag 10 = name (field 1, UTF-8 string),
    // tag 18 = one "values" map entry (field 2, MapEntry message).
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                name_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                com.google.protobuf.MapEntry<java.lang.String, com.google.protobuf.Value> values__ =
                    input.readMessage(
                        ValuesDefaultEntryHolder.defaultEntry.getParserForType(),
                        extensionRegistry);
                internalGetMutableValues()
                    .ensureBuilderMap()
                    .put(values__.getKey(), values__.getValue());
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Presence bits: 0x00000001 = name, 0x00000002 = values.
    private int bitField0_;
    // Same lazy String/ByteString duality as the message field of the same name.
    private java.lang.Object name_ = "";
    /**
     *
     *
     * <pre>
     * The resource name of the row.
     * Row names have the form `tables/{table}/rows/{row}`.
     * The name is ignored when creating a row.
     * </pre>
     *
     * <code>string name = 1;</code>
     *
     * @return The name.
     */
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        name_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The resource name of the row.
     * Row names have the form `tables/{table}/rows/{row}`.
     * The name is ignored when creating a row.
     * </pre>
     *
     * <code>string name = 1;</code>
     *
     * @return The bytes for name.
     */
    public com.google.protobuf.ByteString getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        name_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The resource name of the row.
     * Row names have the form `tables/{table}/rows/{row}`.
     * The name is ignored when creating a row.
     * </pre>
     *
     * <code>string name = 1;</code>
     *
     * @param value The name to set.
     * @return This builder for chaining.
     */
    public Builder setName(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      name_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The resource name of the row.
     * Row names have the form `tables/{table}/rows/{row}`.
     * The name is ignored when creating a row.
     * </pre>
     *
     * <code>string name = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearName() {
      name_ = getDefaultInstance().getName();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The resource name of the row.
     * Row names have the form `tables/{table}/rows/{row}`.
     * The name is ignored when creating a row.
     * </pre>
     *
     * <code>string name = 1;</code>
     *
     * @param value The bytes for name to set.
     * @return This builder for chaining.
     */
    public Builder setNameBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      name_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    // Adapts map entries stored as either built Values or Value.Builders into built Values.
    private static final class ValuesConverter
        implements com.google.protobuf.MapFieldBuilder.Converter<
            java.lang.String, com.google.protobuf.ValueOrBuilder, com.google.protobuf.Value> {
      @java.lang.Override
      public com.google.protobuf.Value build(com.google.protobuf.ValueOrBuilder val) {
        if (val instanceof com.google.protobuf.Value) {
          return (com.google.protobuf.Value) val;
        }
        return ((com.google.protobuf.Value.Builder) val).build();
      }
      @java.lang.Override
      public com.google.protobuf.MapEntry<java.lang.String, com.google.protobuf.Value>
          defaultEntry() {
        return ValuesDefaultEntryHolder.defaultEntry;
      }
    }
    ;
    private static final ValuesConverter valuesConverter = new ValuesConverter();
    // Null until a map mutation occurs; read-only access falls back to a fresh empty builder.
    private com.google.protobuf.MapFieldBuilder<
            java.lang.String,
            com.google.protobuf.ValueOrBuilder,
            com.google.protobuf.Value,
            com.google.protobuf.Value.Builder>
        values_;
    private com.google.protobuf.MapFieldBuilder<
            java.lang.String,
            com.google.protobuf.ValueOrBuilder,
            com.google.protobuf.Value,
            com.google.protobuf.Value.Builder>
        internalGetValues() {
      if (values_ == null) {
        return new com.google.protobuf.MapFieldBuilder<>(valuesConverter);
      }
      return values_;
    }
    private com.google.protobuf.MapFieldBuilder<
            java.lang.String,
            com.google.protobuf.ValueOrBuilder,
            com.google.protobuf.Value,
            com.google.protobuf.Value.Builder>
        internalGetMutableValues() {
      if (values_ == null) {
        values_ = new com.google.protobuf.MapFieldBuilder<>(valuesConverter);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return values_;
    }
    public int getValuesCount() {
      return internalGetValues().ensureBuilderMap().size();
    }
    /**
     *
     *
     * <pre>
     * The values of the row. This is a map of column key to value.
     * Key is user entered name(default) or the internal column id based on
     * the view in the request.
     * </pre>
     *
     * <code>map<string, .google.protobuf.Value> values = 2;</code>
     */
    @java.lang.Override
    public boolean containsValues(java.lang.String key) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      return internalGetValues().ensureBuilderMap().containsKey(key);
    }
    /** Use {@link #getValuesMap()} instead. */
    @java.lang.Override
    @java.lang.Deprecated
    public java.util.Map<java.lang.String, com.google.protobuf.Value> getValues() {
      return getValuesMap();
    }
    /**
     *
     *
     * <pre>
     * The values of the row. This is a map of column key to value.
     * Key is user entered name(default) or the internal column id based on
     * the view in the request.
     * </pre>
     *
     * <code>map<string, .google.protobuf.Value> values = 2;</code>
     */
    @java.lang.Override
    public java.util.Map<java.lang.String, com.google.protobuf.Value> getValuesMap() {
      return internalGetValues().getImmutableMap();
    }
    /**
     *
     *
     * <pre>
     * The values of the row. This is a map of column key to value.
     * Key is user entered name(default) or the internal column id based on
     * the view in the request.
     * </pre>
     *
     * <code>map<string, .google.protobuf.Value> values = 2;</code>
     */
    @java.lang.Override
    public /* nullable */ com.google.protobuf.Value getValuesOrDefault(
        java.lang.String key,
        /* nullable */
        com.google.protobuf.Value defaultValue) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      java.util.Map<java.lang.String, com.google.protobuf.ValueOrBuilder> map =
          internalGetMutableValues().ensureBuilderMap();
      return map.containsKey(key) ? valuesConverter.build(map.get(key)) : defaultValue;
    }
    /**
     *
     *
     * <pre>
     * The values of the row. This is a map of column key to value.
     * Key is user entered name(default) or the internal column id based on
     * the view in the request.
     * </pre>
     *
     * <code>map<string, .google.protobuf.Value> values = 2;</code>
     */
    @java.lang.Override
    public com.google.protobuf.Value getValuesOrThrow(java.lang.String key) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      java.util.Map<java.lang.String, com.google.protobuf.ValueOrBuilder> map =
          internalGetMutableValues().ensureBuilderMap();
      if (!map.containsKey(key)) {
        throw new java.lang.IllegalArgumentException();
      }
      return valuesConverter.build(map.get(key));
    }
    public Builder clearValues() {
      bitField0_ = (bitField0_ & ~0x00000002);
      internalGetMutableValues().clear();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The values of the row. This is a map of column key to value.
     * Key is user entered name(default) or the internal column id based on
     * the view in the request.
     * </pre>
     *
     * <code>map<string, .google.protobuf.Value> values = 2;</code>
     */
    public Builder removeValues(java.lang.String key) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      internalGetMutableValues().ensureBuilderMap().remove(key);
      return this;
    }
    /** Use alternate mutation accessors instead. */
    @java.lang.Deprecated
    public java.util.Map<java.lang.String, com.google.protobuf.Value> getMutableValues() {
      bitField0_ |= 0x00000002;
      return internalGetMutableValues().ensureMessageMap();
    }
    /**
     *
     *
     * <pre>
     * The values of the row. This is a map of column key to value.
     * Key is user entered name(default) or the internal column id based on
     * the view in the request.
     * </pre>
     *
     * <code>map<string, .google.protobuf.Value> values = 2;</code>
     */
    public Builder putValues(java.lang.String key, com.google.protobuf.Value value) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      if (value == null) {
        throw new NullPointerException("map value");
      }
      internalGetMutableValues().ensureBuilderMap().put(key, value);
      bitField0_ |= 0x00000002;
      return this;
    }
    /**
     *
     *
     * <pre>
     * The values of the row. This is a map of column key to value.
     * Key is user entered name(default) or the internal column id based on
     * the view in the request.
     * </pre>
     *
     * <code>map<string, .google.protobuf.Value> values = 2;</code>
     */
    public Builder putAllValues(java.util.Map<java.lang.String, com.google.protobuf.Value> values) {
      for (java.util.Map.Entry<java.lang.String, com.google.protobuf.Value> e : values.entrySet()) {
        if (e.getKey() == null || e.getValue() == null) {
          throw new NullPointerException();
        }
      }
      internalGetMutableValues().ensureBuilderMap().putAll(values);
      bitField0_ |= 0x00000002;
      return this;
    }
    /**
     *
     *
     * <pre>
     * The values of the row. This is a map of column key to value.
     * Key is user entered name(default) or the internal column id based on
     * the view in the request.
     * </pre>
     *
     * <code>map<string, .google.protobuf.Value> values = 2;</code>
     */
    public com.google.protobuf.Value.Builder putValuesBuilderIfAbsent(java.lang.String key) {
      java.util.Map<java.lang.String, com.google.protobuf.ValueOrBuilder> builderMap =
          internalGetMutableValues().ensureBuilderMap();
      com.google.protobuf.ValueOrBuilder entry = builderMap.get(key);
      if (entry == null) {
        entry = com.google.protobuf.Value.newBuilder();
        builderMap.put(key, entry);
      }
      // Promote a built Value to a Builder so the caller can mutate it in place.
      if (entry instanceof com.google.protobuf.Value) {
        entry = ((com.google.protobuf.Value) entry).toBuilder();
        builderMap.put(key, entry);
      }
      return (com.google.protobuf.Value.Builder) entry;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.area120.tables.v1alpha1.Row)
  }
  // @@protoc_insertion_point(class_scope:google.area120.tables.v1alpha1.Row)
  private static final com.google.area120.tables.v1alpha1.Row DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.area120.tables.v1alpha1.Row();
  }
  /** Returns the shared immutable instance with every field at its default. */
  public static com.google.area120.tables.v1alpha1.Row getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser singleton; delegates to the Builder's wire-format merge loop and
  // attaches the partially-built message to any parse failure.
  private static final com.google.protobuf.Parser<Row> PARSER =
      new com.google.protobuf.AbstractParser<Row>() {
        @java.lang.Override
        public Row parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<Row> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<Row> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.area120.tables.v1alpha1.Row getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-api-java-client-services | 35,275 | clients/google-api-services-ml/v1/1.26.0/com/google/api/services/ml/v1/model/GoogleCloudMlV1TrainingInput.java | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.ml.v1.model;
/**
* Represents input parameters for a training job. When using the gcloud command to submit your
* training job, you can specify the input parameters as command-line arguments and/or in a YAML
* configuration file referenced from the --config command-line argument. For details, see the guide
* to submitting a training job.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Cloud Machine Learning Engine. For a detailed
* explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class GoogleCloudMlV1TrainingInput extends com.google.api.client.json.GenericJson {
/**
* Optional. Command line arguments to pass to the program.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> args;
/**
* Optional. The set of Hyperparameters to tune.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private GoogleCloudMlV1HyperparameterSpec hyperparameters;
/**
* Optional. A Google Cloud Storage path in which to store training outputs and other data needed
* for training. This path is passed to your TensorFlow program as the '--job-dir' command-line
* argument. The benefit of specifying this field is that Cloud ML validates the path for use in
* training.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String jobDir;
/**
* Optional. The configuration for your master worker.
*
* You should only set `masterConfig.acceleratorConfig` if `masterType` is set to a Compute Engine
* machine type. Learn about [restrictions on accelerator configurations for training.](/ml-
* engine/docs/tensorflow/using-gpus#compute-engine-machine-types-with-gpu)
*
* Set `masterConfig.imageUri` only if you build a custom image. Only one of
* `masterConfig.imageUri` and `runtimeVersion` should be set. Learn more about [configuring
* custom containers](/ml-engine/docs/distributed-training-containers).
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private GoogleCloudMlV1ReplicaConfig masterConfig;
/**
* Optional. Specifies the type of virtual machine to use for your training job's master worker.
*
* The following types are supported:
*
* standard A basic machine configuration suitable for training simple models with small
* to moderate datasets. large_model A machine with a lot of memory, specially suited
* for parameter servers when your model is large (having many hidden layers or layers with very
* large numbers of nodes). complex_model_s A machine suitable for the master and
* workers of the cluster when your model requires more computation than the standard machine
* can handle satisfactorily. complex_model_m A machine with roughly twice the number
* of cores and roughly double the memory of complex_model_s. complex_model_l A
* machine with roughly twice the number of cores and roughly double the memory of
* complex_model_m. standard_gpu A machine equivalent to standard that also includes a
* single NVIDIA Tesla K80 GPU. See more about using GPUs to train your model.
* complex_model_m_gpu A machine equivalent to complex_model_m that also includes four
* NVIDIA Tesla K80 GPUs. complex_model_l_gpu A machine equivalent to complex_model_l
* that also includes eight NVIDIA Tesla K80 GPUs. standard_p100 A machine equivalent
* to standard that also includes a single NVIDIA Tesla P100 GPU. complex_model_m_p100
* A machine equivalent to complex_model_m that also includes four NVIDIA Tesla P100 GPUs.
* standard_v100 A machine equivalent to standard that also includes a single NVIDIA Tesla
* V100 GPU. large_model_v100 A machine equivalent to large_model that also includes a
* single NVIDIA Tesla V100 GPU. complex_model_m_v100 A machine equivalent to
* complex_model_m that also includes four NVIDIA Tesla V100 GPUs. complex_model_l_v100
* A machine equivalent to complex_model_l that also includes eight NVIDIA Tesla V100 GPUs.
* cloud_tpu A TPU VM including one Cloud TPU. See more about using TPUs to train your
* model.
*
* You may also use certain Compute Engine machine types directly in this field. The following
* types are supported:
*
* - `n1-standard-4` - `n1-standard-8` - `n1-standard-16` - `n1-standard-32` - `n1-standard-64` -
* `n1-standard-96` - `n1-highmem-2` - `n1-highmem-4` - `n1-highmem-8` - `n1-highmem-16` -
* `n1-highmem-32` - `n1-highmem-64` - `n1-highmem-96` - `n1-highcpu-16` - `n1-highcpu-32` -
* `n1-highcpu-64` - `n1-highcpu-96`
*
* See more about [using Compute Engine machine types](/ml-engine/docs/tensorflow/machine-types
* #compute-engine-machine-types).
*
* You must set this value when `scaleTier` is set to `CUSTOM`.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String masterType;
/**
* Optional. The maximum job running time. The default is 7 days.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private String maxRunningTime;
/**
* Required. The Google Cloud Storage location of the packages with the training program and any
* additional dependencies. The maximum number of package URIs is 100.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> packageUris;
/**
* Optional. The configuration for parameter servers.
*
* You should only set `parameterServerConfig.acceleratorConfig` if `parameterServerConfigType` is
* set to a Compute Engine machine type. [Learn about restrictions on accelerator configurations
* for training.](/ml-engine/docs/tensorflow/using-gpus#compute-engine-machine-types-with-gpu)
*
* Set `parameterServerConfig.imageUri` only if you build a custom image for your parameter
* server. If `parameterServerConfig.imageUri` has not been set, AI Platform uses the value of
* `masterConfig.imageUri`. Learn more about [configuring custom containers](/ml-engine/docs
* /distributed-training-containers).
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private GoogleCloudMlV1ReplicaConfig parameterServerConfig;
/**
* Optional. The number of parameter server replicas to use for the training job. Each replica in
* the cluster will be of the type specified in `parameter_server_type`.
*
* This value can only be used when `scale_tier` is set to `CUSTOM`.If you set this value, you
* must also set `parameter_server_type`.
*
* The default value is zero.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long parameterServerCount;
/**
* Optional. Specifies the type of virtual machine to use for your training job's parameter
* server.
*
* The supported values are the same as those described in the entry for `master_type`.
*
* This value must be consistent with the category of machine type that `masterType` uses. In
* other words, both must be AI Platform machine types or both must be Compute Engine machine
* types.
*
* This value must be present when `scaleTier` is set to `CUSTOM` and `parameter_server_count` is
* greater than zero.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String parameterServerType;
/**
* Required. The Python module name to run after installing the packages.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String pythonModule;
/**
* Optional. The version of Python used in training. If not set, the default version is '2.7'.
* Python '3.5' is available when `runtime_version` is set to '1.4' and above. Python '2.7' works
* with all supported runtime versions.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String pythonVersion;
/**
* Required. The Google Compute Engine region to run the training job in. See the available
* regions for AI Platform services.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String region;
/**
* Optional. The AI Platform runtime version to use for training. If not set, AI Platform uses the
* default stable version, 1.0. For more information, see the runtime version list and how to
* manage runtime versions.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String runtimeVersion;
/**
* Required. Specifies the machine types, the number of replicas for workers and parameter
* servers.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String scaleTier;
/**
* Optional. The configuration for workers.
*
* You should only set `workerConfig.acceleratorConfig` if `workerType` is set to a Compute Engine
* machine type. [Learn about restrictions on accelerator configurations for training.](/ml-
* engine/docs/tensorflow/using-gpus#compute-engine-machine-types-with-gpu)
*
* Set `workerConfig.imageUri` only if you build a custom image for your worker. If
* `workerConfig.imageUri` has not been set, AI Platform uses the value of
* `masterConfig.imageUri`. Learn more about [configuring custom containers](/ml-engine/docs
* /distributed-training-containers).
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private GoogleCloudMlV1ReplicaConfig workerConfig;
/**
* Optional. The number of worker replicas to use for the training job. Each replica in the
* cluster will be of the type specified in `worker_type`.
*
* This value can only be used when `scale_tier` is set to `CUSTOM`. If you set this value, you
* must also set `worker_type`.
*
* The default value is zero.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long workerCount;
/**
* Optional. Specifies the type of virtual machine to use for your training job's worker nodes.
*
* The supported values are the same as those described in the entry for `masterType`.
*
* This value must be consistent with the category of machine type that `masterType` uses. In
* other words, both must be AI Platform machine types or both must be Compute Engine machine
* types.
*
* If you use `cloud_tpu` for this value, see special instructions for [configuring a custom TPU
* machine](/ml-engine/docs/tensorflow/using-tpus#configuring_a_custom_tpu_machine).
*
* This value must be present when `scaleTier` is set to `CUSTOM` and `workerCount` is greater
* than zero.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String workerType;
/**
* Optional. Command line arguments to pass to the program.
* @return value or {@code null} for none
*/
public java.util.List<java.lang.String> getArgs() {
return args;
}
/**
* Optional. Command line arguments to pass to the program.
* @param args args or {@code null} for none
*/
public GoogleCloudMlV1TrainingInput setArgs(java.util.List<java.lang.String> args) {
this.args = args;
return this;
}
/**
* Optional. The set of Hyperparameters to tune.
* @return value or {@code null} for none
*/
public GoogleCloudMlV1HyperparameterSpec getHyperparameters() {
return hyperparameters;
}
/**
* Optional. The set of Hyperparameters to tune.
* @param hyperparameters hyperparameters or {@code null} for none
*/
public GoogleCloudMlV1TrainingInput setHyperparameters(GoogleCloudMlV1HyperparameterSpec hyperparameters) {
this.hyperparameters = hyperparameters;
return this;
}
/**
* Optional. A Google Cloud Storage path in which to store training outputs and other data needed
* for training. This path is passed to your TensorFlow program as the '--job-dir' command-line
* argument. The benefit of specifying this field is that Cloud ML validates the path for use in
* training.
* @return value or {@code null} for none
*/
public java.lang.String getJobDir() {
return jobDir;
}
/**
* Optional. A Google Cloud Storage path in which to store training outputs and other data needed
* for training. This path is passed to your TensorFlow program as the '--job-dir' command-line
* argument. The benefit of specifying this field is that Cloud ML validates the path for use in
* training.
* @param jobDir jobDir or {@code null} for none
*/
public GoogleCloudMlV1TrainingInput setJobDir(java.lang.String jobDir) {
this.jobDir = jobDir;
return this;
}
/**
* Optional. The configuration for your master worker.
*
* You should only set `masterConfig.acceleratorConfig` if `masterType` is set to a Compute Engine
* machine type. Learn about [restrictions on accelerator configurations for training.](/ml-
* engine/docs/tensorflow/using-gpus#compute-engine-machine-types-with-gpu)
*
* Set `masterConfig.imageUri` only if you build a custom image. Only one of
* `masterConfig.imageUri` and `runtimeVersion` should be set. Learn more about [configuring
* custom containers](/ml-engine/docs/distributed-training-containers).
* @return value or {@code null} for none
*/
public GoogleCloudMlV1ReplicaConfig getMasterConfig() {
return masterConfig;
}
/**
* Optional. The configuration for your master worker.
*
* You should only set `masterConfig.acceleratorConfig` if `masterType` is set to a Compute Engine
* machine type. Learn about [restrictions on accelerator configurations for training.](/ml-
* engine/docs/tensorflow/using-gpus#compute-engine-machine-types-with-gpu)
*
* Set `masterConfig.imageUri` only if you build a custom image. Only one of
* `masterConfig.imageUri` and `runtimeVersion` should be set. Learn more about [configuring
* custom containers](/ml-engine/docs/distributed-training-containers).
* @param masterConfig masterConfig or {@code null} for none
*/
public GoogleCloudMlV1TrainingInput setMasterConfig(GoogleCloudMlV1ReplicaConfig masterConfig) {
this.masterConfig = masterConfig;
return this;
}
/**
* Optional. Specifies the type of virtual machine to use for your training job's master worker.
*
* The following types are supported:
*
* standard A basic machine configuration suitable for training simple models with small
* to moderate datasets. large_model A machine with a lot of memory, specially suited
* for parameter servers when your model is large (having many hidden layers or layers with very
* large numbers of nodes). complex_model_s A machine suitable for the master and
* workers of the cluster when your model requires more computation than the standard machine
* can handle satisfactorily. complex_model_m A machine with roughly twice the number
* of cores and roughly double the memory of complex_model_s. complex_model_l A
* machine with roughly twice the number of cores and roughly double the memory of
* complex_model_m. standard_gpu A machine equivalent to standard that also includes a
* single NVIDIA Tesla K80 GPU. See more about using GPUs to train your model.
* complex_model_m_gpu A machine equivalent to complex_model_m that also includes four
* NVIDIA Tesla K80 GPUs. complex_model_l_gpu A machine equivalent to complex_model_l
* that also includes eight NVIDIA Tesla K80 GPUs. standard_p100 A machine equivalent
* to standard that also includes a single NVIDIA Tesla P100 GPU. complex_model_m_p100
* A machine equivalent to complex_model_m that also includes four NVIDIA Tesla P100 GPUs.
* standard_v100 A machine equivalent to standard that also includes a single NVIDIA Tesla
* V100 GPU. large_model_v100 A machine equivalent to large_model that also includes a
* single NVIDIA Tesla V100 GPU. complex_model_m_v100 A machine equivalent to
* complex_model_m that also includes four NVIDIA Tesla V100 GPUs. complex_model_l_v100
* A machine equivalent to complex_model_l that also includes eight NVIDIA Tesla V100 GPUs.
* cloud_tpu A TPU VM including one Cloud TPU. See more about using TPUs to train your
* model.
*
* You may also use certain Compute Engine machine types directly in this field. The following
* types are supported:
*
* - `n1-standard-4` - `n1-standard-8` - `n1-standard-16` - `n1-standard-32` - `n1-standard-64` -
* `n1-standard-96` - `n1-highmem-2` - `n1-highmem-4` - `n1-highmem-8` - `n1-highmem-16` -
* `n1-highmem-32` - `n1-highmem-64` - `n1-highmem-96` - `n1-highcpu-16` - `n1-highcpu-32` -
* `n1-highcpu-64` - `n1-highcpu-96`
*
* See more about [using Compute Engine machine types](/ml-engine/docs/tensorflow/machine-types
* #compute-engine-machine-types).
*
* You must set this value when `scaleTier` is set to `CUSTOM`.
* @return value or {@code null} for none
*/
public java.lang.String getMasterType() {
return masterType;
}
/**
* Optional. Specifies the type of virtual machine to use for your training job's master worker.
*
* The following types are supported:
*
* standard A basic machine configuration suitable for training simple models with small
* to moderate datasets. large_model A machine with a lot of memory, specially suited
* for parameter servers when your model is large (having many hidden layers or layers with very
* large numbers of nodes). complex_model_s A machine suitable for the master and
* workers of the cluster when your model requires more computation than the standard machine
* can handle satisfactorily. complex_model_m A machine with roughly twice the number
* of cores and roughly double the memory of complex_model_s. complex_model_l A
* machine with roughly twice the number of cores and roughly double the memory of
* complex_model_m. standard_gpu A machine equivalent to standard that also includes a
* single NVIDIA Tesla K80 GPU. See more about using GPUs to train your model.
* complex_model_m_gpu A machine equivalent to complex_model_m that also includes four
* NVIDIA Tesla K80 GPUs. complex_model_l_gpu A machine equivalent to complex_model_l
* that also includes eight NVIDIA Tesla K80 GPUs. standard_p100 A machine equivalent
* to standard that also includes a single NVIDIA Tesla P100 GPU. complex_model_m_p100
* A machine equivalent to complex_model_m that also includes four NVIDIA Tesla P100 GPUs.
* standard_v100 A machine equivalent to standard that also includes a single NVIDIA Tesla
* V100 GPU. large_model_v100 A machine equivalent to large_model that also includes a
* single NVIDIA Tesla V100 GPU. complex_model_m_v100 A machine equivalent to
* complex_model_m that also includes four NVIDIA Tesla V100 GPUs. complex_model_l_v100
* A machine equivalent to complex_model_l that also includes eight NVIDIA Tesla V100 GPUs.
* cloud_tpu A TPU VM including one Cloud TPU. See more about using TPUs to train your
* model.
*
* You may also use certain Compute Engine machine types directly in this field. The following
* types are supported:
*
* - `n1-standard-4` - `n1-standard-8` - `n1-standard-16` - `n1-standard-32` - `n1-standard-64` -
* `n1-standard-96` - `n1-highmem-2` - `n1-highmem-4` - `n1-highmem-8` - `n1-highmem-16` -
* `n1-highmem-32` - `n1-highmem-64` - `n1-highmem-96` - `n1-highcpu-16` - `n1-highcpu-32` -
* `n1-highcpu-64` - `n1-highcpu-96`
*
* See more about [using Compute Engine machine types](/ml-engine/docs/tensorflow/machine-types
* #compute-engine-machine-types).
*
* You must set this value when `scaleTier` is set to `CUSTOM`.
* @param masterType masterType or {@code null} for none
*/
public GoogleCloudMlV1TrainingInput setMasterType(java.lang.String masterType) {
this.masterType = masterType;
return this;
}
/**
* Optional. The maximum job running time. The default is 7 days.
* @return value or {@code null} for none
*/
public String getMaxRunningTime() {
return maxRunningTime;
}
/**
* Optional. The maximum job running time. The default is 7 days.
* @param maxRunningTime maxRunningTime or {@code null} for none
*/
public GoogleCloudMlV1TrainingInput setMaxRunningTime(String maxRunningTime) {
this.maxRunningTime = maxRunningTime;
return this;
}
/**
* Required. The Google Cloud Storage location of the packages with the training program and any
* additional dependencies. The maximum number of package URIs is 100.
* @return value or {@code null} for none
*/
public java.util.List<java.lang.String> getPackageUris() {
return packageUris;
}
/**
* Required. The Google Cloud Storage location of the packages with the training program and any
* additional dependencies. The maximum number of package URIs is 100.
* @param packageUris packageUris or {@code null} for none
*/
public GoogleCloudMlV1TrainingInput setPackageUris(java.util.List<java.lang.String> packageUris) {
this.packageUris = packageUris;
return this;
}
  /**
   * Optional. The configuration for parameter servers.
   *
   * You should only set `parameterServerConfig.acceleratorConfig` if `parameterServerType` is set
   * to a Compute Engine machine type. [Learn about restrictions on accelerator configurations for
   * training.](/ml-engine/docs/tensorflow/using-gpus#compute-engine-machine-types-with-gpu)
   *
   * Set `parameterServerConfig.imageUri` only if you build a custom image for your parameter
   * server. If `parameterServerConfig.imageUri` has not been set, AI Platform uses the value of
   * `masterConfig.imageUri`. Learn more about [configuring custom containers](/ml-engine/docs
   * /distributed-training-containers).
   * @return value or {@code null} for none
   */
  public GoogleCloudMlV1ReplicaConfig getParameterServerConfig() {
    return parameterServerConfig;
  }
  /**
   * Optional. The configuration for parameter servers.
   *
   * You should only set `parameterServerConfig.acceleratorConfig` if `parameterServerType` is set
   * to a Compute Engine machine type. [Learn about restrictions on accelerator configurations for
   * training.](/ml-engine/docs/tensorflow/using-gpus#compute-engine-machine-types-with-gpu)
   *
   * Set `parameterServerConfig.imageUri` only if you build a custom image for your parameter
   * server. If `parameterServerConfig.imageUri` has not been set, AI Platform uses the value of
   * `masterConfig.imageUri`. Learn more about [configuring custom containers](/ml-engine/docs
   * /distributed-training-containers).
   * @param parameterServerConfig parameterServerConfig or {@code null} for none
   */
  public GoogleCloudMlV1TrainingInput setParameterServerConfig(GoogleCloudMlV1ReplicaConfig parameterServerConfig) {
    this.parameterServerConfig = parameterServerConfig;
    return this;
  }
  /**
   * Optional. The number of parameter server replicas to use for the training job. Each replica in
   * the cluster will be of the type specified in `parameter_server_type`.
   *
   * This value can only be used when `scale_tier` is set to `CUSTOM`. If you set this value, you
   * must also set `parameter_server_type`.
   *
   * The default value is zero.
   * @return value or {@code null} for none
   */
  public java.lang.Long getParameterServerCount() {
    return parameterServerCount;
  }
  /**
   * Optional. The number of parameter server replicas to use for the training job. Each replica in
   * the cluster will be of the type specified in `parameter_server_type`.
   *
   * This value can only be used when `scale_tier` is set to `CUSTOM`. If you set this value, you
   * must also set `parameter_server_type`.
   *
   * The default value is zero.
   * @param parameterServerCount parameterServerCount or {@code null} for none
   */
  public GoogleCloudMlV1TrainingInput setParameterServerCount(java.lang.Long parameterServerCount) {
    this.parameterServerCount = parameterServerCount;
    return this;
  }
/**
* Optional. Specifies the type of virtual machine to use for your training job's parameter
* server.
*
* The supported values are the same as those described in the entry for `master_type`.
*
* This value must be consistent with the category of machine type that `masterType` uses. In
* other words, both must be AI Platform machine types or both must be Compute Engine machine
* types.
*
* This value must be present when `scaleTier` is set to `CUSTOM` and `parameter_server_count` is
* greater than zero.
* @return value or {@code null} for none
*/
public java.lang.String getParameterServerType() {
return parameterServerType;
}
/**
* Optional. Specifies the type of virtual machine to use for your training job's parameter
* server.
*
* The supported values are the same as those described in the entry for `master_type`.
*
* This value must be consistent with the category of machine type that `masterType` uses. In
* other words, both must be AI Platform machine types or both must be Compute Engine machine
* types.
*
* This value must be present when `scaleTier` is set to `CUSTOM` and `parameter_server_count` is
* greater than zero.
* @param parameterServerType parameterServerType or {@code null} for none
*/
public GoogleCloudMlV1TrainingInput setParameterServerType(java.lang.String parameterServerType) {
this.parameterServerType = parameterServerType;
return this;
}
/**
* Required. The Python module name to run after installing the packages.
* @return value or {@code null} for none
*/
public java.lang.String getPythonModule() {
return pythonModule;
}
/**
* Required. The Python module name to run after installing the packages.
* @param pythonModule pythonModule or {@code null} for none
*/
public GoogleCloudMlV1TrainingInput setPythonModule(java.lang.String pythonModule) {
this.pythonModule = pythonModule;
return this;
}
/**
* Optional. The version of Python used in training. If not set, the default version is '2.7'.
* Python '3.5' is available when `runtime_version` is set to '1.4' and above. Python '2.7' works
* with all supported runtime versions.
* @return value or {@code null} for none
*/
public java.lang.String getPythonVersion() {
return pythonVersion;
}
/**
* Optional. The version of Python used in training. If not set, the default version is '2.7'.
* Python '3.5' is available when `runtime_version` is set to '1.4' and above. Python '2.7' works
* with all supported runtime versions.
* @param pythonVersion pythonVersion or {@code null} for none
*/
public GoogleCloudMlV1TrainingInput setPythonVersion(java.lang.String pythonVersion) {
this.pythonVersion = pythonVersion;
return this;
}
/**
* Required. The Google Compute Engine region to run the training job in. See the available
* regions for AI Platform services.
* @return value or {@code null} for none
*/
public java.lang.String getRegion() {
return region;
}
/**
* Required. The Google Compute Engine region to run the training job in. See the available
* regions for AI Platform services.
* @param region region or {@code null} for none
*/
public GoogleCloudMlV1TrainingInput setRegion(java.lang.String region) {
this.region = region;
return this;
}
/**
* Optional. The AI Platform runtime version to use for training. If not set, AI Platform uses the
* default stable version, 1.0. For more information, see the runtime version list and how to
* manage runtime versions.
* @return value or {@code null} for none
*/
public java.lang.String getRuntimeVersion() {
return runtimeVersion;
}
/**
* Optional. The AI Platform runtime version to use for training. If not set, AI Platform uses the
* default stable version, 1.0. For more information, see the runtime version list and how to
* manage runtime versions.
* @param runtimeVersion runtimeVersion or {@code null} for none
*/
public GoogleCloudMlV1TrainingInput setRuntimeVersion(java.lang.String runtimeVersion) {
this.runtimeVersion = runtimeVersion;
return this;
}
/**
* Required. Specifies the machine types, the number of replicas for workers and parameter
* servers.
* @return value or {@code null} for none
*/
public java.lang.String getScaleTier() {
return scaleTier;
}
/**
* Required. Specifies the machine types, the number of replicas for workers and parameter
* servers.
* @param scaleTier scaleTier or {@code null} for none
*/
public GoogleCloudMlV1TrainingInput setScaleTier(java.lang.String scaleTier) {
this.scaleTier = scaleTier;
return this;
}
/**
* Optional. The configuration for workers.
*
* You should only set `workerConfig.acceleratorConfig` if `workerType` is set to a Compute Engine
* machine type. [Learn about restrictions on accelerator configurations for training.](/ml-
* engine/docs/tensorflow/using-gpus#compute-engine-machine-types-with-gpu)
*
* Set `workerConfig.imageUri` only if you build a custom image for your worker. If
* `workerConfig.imageUri` has not been set, AI Platform uses the value of
* `masterConfig.imageUri`. Learn more about [configuring custom containers](/ml-engine/docs
* /distributed-training-containers).
* @return value or {@code null} for none
*/
public GoogleCloudMlV1ReplicaConfig getWorkerConfig() {
return workerConfig;
}
/**
* Optional. The configuration for workers.
*
* You should only set `workerConfig.acceleratorConfig` if `workerType` is set to a Compute Engine
* machine type. [Learn about restrictions on accelerator configurations for training.](/ml-
* engine/docs/tensorflow/using-gpus#compute-engine-machine-types-with-gpu)
*
* Set `workerConfig.imageUri` only if you build a custom image for your worker. If
* `workerConfig.imageUri` has not been set, AI Platform uses the value of
* `masterConfig.imageUri`. Learn more about [configuring custom containers](/ml-engine/docs
* /distributed-training-containers).
* @param workerConfig workerConfig or {@code null} for none
*/
public GoogleCloudMlV1TrainingInput setWorkerConfig(GoogleCloudMlV1ReplicaConfig workerConfig) {
this.workerConfig = workerConfig;
return this;
}
/**
* Optional. The number of worker replicas to use for the training job. Each replica in the
* cluster will be of the type specified in `worker_type`.
*
* This value can only be used when `scale_tier` is set to `CUSTOM`. If you set this value, you
* must also set `worker_type`.
*
* The default value is zero.
* @return value or {@code null} for none
*/
public java.lang.Long getWorkerCount() {
return workerCount;
}
/**
* Optional. The number of worker replicas to use for the training job. Each replica in the
* cluster will be of the type specified in `worker_type`.
*
* This value can only be used when `scale_tier` is set to `CUSTOM`. If you set this value, you
* must also set `worker_type`.
*
* The default value is zero.
* @param workerCount workerCount or {@code null} for none
*/
public GoogleCloudMlV1TrainingInput setWorkerCount(java.lang.Long workerCount) {
this.workerCount = workerCount;
return this;
}
/**
* Optional. Specifies the type of virtual machine to use for your training job's worker nodes.
*
* The supported values are the same as those described in the entry for `masterType`.
*
* This value must be consistent with the category of machine type that `masterType` uses. In
* other words, both must be AI Platform machine types or both must be Compute Engine machine
* types.
*
* If you use `cloud_tpu` for this value, see special instructions for [configuring a custom TPU
* machine](/ml-engine/docs/tensorflow/using-tpus#configuring_a_custom_tpu_machine).
*
* This value must be present when `scaleTier` is set to `CUSTOM` and `workerCount` is greater
* than zero.
* @return value or {@code null} for none
*/
public java.lang.String getWorkerType() {
return workerType;
}
/**
* Optional. Specifies the type of virtual machine to use for your training job's worker nodes.
*
* The supported values are the same as those described in the entry for `masterType`.
*
* This value must be consistent with the category of machine type that `masterType` uses. In
* other words, both must be AI Platform machine types or both must be Compute Engine machine
* types.
*
* If you use `cloud_tpu` for this value, see special instructions for [configuring a custom TPU
* machine](/ml-engine/docs/tensorflow/using-tpus#configuring_a_custom_tpu_machine).
*
* This value must be present when `scaleTier` is set to `CUSTOM` and `workerCount` is greater
* than zero.
* @param workerType workerType or {@code null} for none
*/
public GoogleCloudMlV1TrainingInput setWorkerType(java.lang.String workerType) {
this.workerType = workerType;
return this;
}
  /** Sets an arbitrary JSON field by name; overridden only to narrow the return type for chaining. */
  @Override
  public GoogleCloudMlV1TrainingInput set(String fieldName, Object value) {
    return (GoogleCloudMlV1TrainingInput) super.set(fieldName, value);
  }
  /** Returns a clone of this instance; overridden only to narrow the declared return type. */
  @Override
  public GoogleCloudMlV1TrainingInput clone() {
    return (GoogleCloudMlV1TrainingInput) super.clone();
  }
}
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.ml.v1.model;
/**
* Represents input parameters for a training job. When using the gcloud command to submit your
* training job, you can specify the input parameters as command-line arguments and/or in a YAML
* configuration file referenced from the --config command-line argument. For details, see the guide
* to submitting a training job.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Cloud Machine Learning Engine. For a detailed
* explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class GoogleCloudMlV1TrainingInput extends com.google.api.client.json.GenericJson {
/**
* Optional. Command line arguments to pass to the program.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> args;
/**
* Optional. The set of Hyperparameters to tune.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private GoogleCloudMlV1HyperparameterSpec hyperparameters;
/**
* Optional. A Google Cloud Storage path in which to store training outputs and other data needed
* for training. This path is passed to your TensorFlow program as the '--job-dir' command-line
* argument. The benefit of specifying this field is that Cloud ML validates the path for use in
* training.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String jobDir;
/**
* Optional. The configuration for your master worker.
*
* You should only set `masterConfig.acceleratorConfig` if `masterType` is set to a Compute Engine
* machine type. Learn about [restrictions on accelerator configurations for training.](/ml-
* engine/docs/tensorflow/using-gpus#compute-engine-machine-types-with-gpu)
*
* Set `masterConfig.imageUri` only if you build a custom image. Only one of
* `masterConfig.imageUri` and `runtimeVersion` should be set. Learn more about [configuring
* custom containers](/ml-engine/docs/distributed-training-containers).
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private GoogleCloudMlV1ReplicaConfig masterConfig;
/**
* Optional. Specifies the type of virtual machine to use for your training job's master worker.
*
* The following types are supported:
*
* standard A basic machine configuration suitable for training simple models with small
* to moderate datasets. large_model A machine with a lot of memory, specially suited
* for parameter servers when your model is large (having many hidden layers or layers with very
* large numbers of nodes). complex_model_s A machine suitable for the master and
* workers of the cluster when your model requires more computation than the standard machine
* can handle satisfactorily. complex_model_m A machine with roughly twice the number
* of cores and roughly double the memory of complex_model_s. complex_model_l A
* machine with roughly twice the number of cores and roughly double the memory of
* complex_model_m. standard_gpu A machine equivalent to standard that also includes a
* single NVIDIA Tesla K80 GPU. See more about using GPUs to train your model.
* complex_model_m_gpu A machine equivalent to complex_model_m that also includes four
* NVIDIA Tesla K80 GPUs. complex_model_l_gpu A machine equivalent to complex_model_l
* that also includes eight NVIDIA Tesla K80 GPUs. standard_p100 A machine equivalent
* to standard that also includes a single NVIDIA Tesla P100 GPU. complex_model_m_p100
* A machine equivalent to complex_model_m that also includes four NVIDIA Tesla P100 GPUs.
* standard_v100 A machine equivalent to standard that also includes a single NVIDIA Tesla
* V100 GPU. large_model_v100 A machine equivalent to large_model that also includes a
* single NVIDIA Tesla V100 GPU. complex_model_m_v100 A machine equivalent to
* complex_model_m that also includes four NVIDIA Tesla V100 GPUs. complex_model_l_v100
* A machine equivalent to complex_model_l that also includes eight NVIDIA Tesla V100 GPUs.
* cloud_tpu A TPU VM including one Cloud TPU. See more about using TPUs to train your
* model.
*
* You may also use certain Compute Engine machine types directly in this field. The following
* types are supported:
*
* - `n1-standard-4` - `n1-standard-8` - `n1-standard-16` - `n1-standard-32` - `n1-standard-64` -
* `n1-standard-96` - `n1-highmem-2` - `n1-highmem-4` - `n1-highmem-8` - `n1-highmem-16` -
* `n1-highmem-32` - `n1-highmem-64` - `n1-highmem-96` - `n1-highcpu-16` - `n1-highcpu-32` -
* `n1-highcpu-64` - `n1-highcpu-96`
*
* See more about [using Compute Engine machine types](/ml-engine/docs/tensorflow/machine-types
* #compute-engine-machine-types).
*
* You must set this value when `scaleTier` is set to `CUSTOM`.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String masterType;
/**
* Optional. The maximum job running time. The default is 7 days.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private String maxRunningTime;
  /**
   * Required. The Google Cloud Storage location of the packages with the training program and any
   * additional dependencies. The maximum number of package URIs is 100.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<java.lang.String> packageUris;
  /**
   * Optional. The configuration for parameter servers.
   *
   * You should only set `parameterServerConfig.acceleratorConfig` if `parameterServerConfigType` is
   * set to a Compute Engine machine type. [Learn about restrictions on accelerator configurations
   * for training.](/ml-engine/docs/tensorflow/using-gpus#compute-engine-machine-types-with-gpu)
   *
   * Set `parameterServerConfig.imageUri` only if you build a custom image for your parameter
   * server. If `parameterServerConfig.imageUri` has not been set, AI Platform uses the value of
   * `masterConfig.imageUri`. Learn more about [configuring custom containers](/ml-engine/docs
   * /distributed-training-containers).
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private GoogleCloudMlV1ReplicaConfig parameterServerConfig;
  /**
   * Optional. The number of parameter server replicas to use for the training job. Each replica in
   * the cluster will be of the type specified in `parameter_server_type`.
   *
   * This value can only be used when `scale_tier` is set to `CUSTOM`.If you set this value, you
   * must also set `parameter_server_type`.
   *
   * The default value is zero.
   * The value may be {@code null}.
   */
  // @JsonString: the 64-bit value is serialized as a JSON string (avoids precision loss in clients).
  @com.google.api.client.util.Key @com.google.api.client.json.JsonString
  private java.lang.Long parameterServerCount;
  /**
   * Optional. Specifies the type of virtual machine to use for your training job's parameter
   * server.
   *
   * The supported values are the same as those described in the entry for `master_type`.
   *
   * This value must be consistent with the category of machine type that `masterType` uses. In
   * other words, both must be AI Platform machine types or both must be Compute Engine machine
   * types.
   *
   * This value must be present when `scaleTier` is set to `CUSTOM` and `parameter_server_count` is
   * greater than zero.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String parameterServerType;
  /**
   * Required. The Python module name to run after installing the packages.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String pythonModule;
  /**
   * Optional. The version of Python used in training. If not set, the default version is '2.7'.
   * Python '3.5' is available when `runtime_version` is set to '1.4' and above. Python '2.7' works
   * with all supported runtime versions.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String pythonVersion;
  /**
   * Required. The Google Compute Engine region to run the training job in. See the available
   * regions for AI Platform services.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String region;
  /**
   * Optional. The AI Platform runtime version to use for training. If not set, AI Platform uses the
   * default stable version, 1.0. For more information, see the runtime version list and how to
   * manage runtime versions.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String runtimeVersion;
  /**
   * Required. Specifies the machine types, the number of replicas for workers and parameter
   * servers.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String scaleTier;
  /**
   * Optional. The configuration for workers.
   *
   * You should only set `workerConfig.acceleratorConfig` if `workerType` is set to a Compute Engine
   * machine type. [Learn about restrictions on accelerator configurations for training.](/ml-
   * engine/docs/tensorflow/using-gpus#compute-engine-machine-types-with-gpu)
   *
   * Set `workerConfig.imageUri` only if you build a custom image for your worker. If
   * `workerConfig.imageUri` has not been set, AI Platform uses the value of
   * `masterConfig.imageUri`. Learn more about [configuring custom containers](/ml-engine/docs
   * /distributed-training-containers).
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private GoogleCloudMlV1ReplicaConfig workerConfig;
  /**
   * Optional. The number of worker replicas to use for the training job. Each replica in the
   * cluster will be of the type specified in `worker_type`.
   *
   * This value can only be used when `scale_tier` is set to `CUSTOM`. If you set this value, you
   * must also set `worker_type`.
   *
   * The default value is zero.
   * The value may be {@code null}.
   */
  // @JsonString: the 64-bit value is serialized as a JSON string (avoids precision loss in clients).
  @com.google.api.client.util.Key @com.google.api.client.json.JsonString
  private java.lang.Long workerCount;
  /**
   * Optional. Specifies the type of virtual machine to use for your training job's worker nodes.
   *
   * The supported values are the same as those described in the entry for `masterType`.
   *
   * This value must be consistent with the category of machine type that `masterType` uses. In
   * other words, both must be AI Platform machine types or both must be Compute Engine machine
   * types.
   *
   * If you use `cloud_tpu` for this value, see special instructions for [configuring a custom TPU
   * machine](/ml-engine/docs/tensorflow/using-tpus#configuring_a_custom_tpu_machine).
   *
   * This value must be present when `scaleTier` is set to `CUSTOM` and `workerCount` is greater
   * than zero.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String workerType;
  /**
   * Optional. Command line arguments to pass to the program.
   * @return value or {@code null} for none
   */
  public java.util.List<java.lang.String> getArgs() {
    return args;
  }
  /**
   * Optional. Command line arguments to pass to the program.
   * @param args args or {@code null} for none
   * @return this {@code GoogleCloudMlV1TrainingInput}, for call chaining
   */
  public GoogleCloudMlV1TrainingInput setArgs(java.util.List<java.lang.String> args) {
    this.args = args;
    return this;
  }
  /**
   * Optional. The set of Hyperparameters to tune.
   * @return value or {@code null} for none
   */
  public GoogleCloudMlV1HyperparameterSpec getHyperparameters() {
    return hyperparameters;
  }
  /**
   * Optional. The set of Hyperparameters to tune.
   * @param hyperparameters hyperparameters or {@code null} for none
   * @return this {@code GoogleCloudMlV1TrainingInput}, for call chaining
   */
  public GoogleCloudMlV1TrainingInput setHyperparameters(GoogleCloudMlV1HyperparameterSpec hyperparameters) {
    this.hyperparameters = hyperparameters;
    return this;
  }
  /**
   * Optional. A Google Cloud Storage path in which to store training outputs and other data needed
   * for training. This path is passed to your TensorFlow program as the '--job-dir' command-line
   * argument. The benefit of specifying this field is that Cloud ML validates the path for use in
   * training.
   * @return value or {@code null} for none
   */
  public java.lang.String getJobDir() {
    return jobDir;
  }
  /**
   * Optional. A Google Cloud Storage path in which to store training outputs and other data needed
   * for training. This path is passed to your TensorFlow program as the '--job-dir' command-line
   * argument. The benefit of specifying this field is that Cloud ML validates the path for use in
   * training.
   * @param jobDir jobDir or {@code null} for none
   * @return this {@code GoogleCloudMlV1TrainingInput}, for call chaining
   */
  public GoogleCloudMlV1TrainingInput setJobDir(java.lang.String jobDir) {
    this.jobDir = jobDir;
    return this;
  }
  /**
   * Optional. The configuration for your master worker.
   *
   * You should only set `masterConfig.acceleratorConfig` if `masterType` is set to a Compute Engine
   * machine type. Learn about [restrictions on accelerator configurations for training.](/ml-
   * engine/docs/tensorflow/using-gpus#compute-engine-machine-types-with-gpu)
   *
   * Set `masterConfig.imageUri` only if you build a custom image. Only one of
   * `masterConfig.imageUri` and `runtimeVersion` should be set. Learn more about [configuring
   * custom containers](/ml-engine/docs/distributed-training-containers).
   * @return value or {@code null} for none
   */
  public GoogleCloudMlV1ReplicaConfig getMasterConfig() {
    return masterConfig;
  }
  /**
   * Optional. The configuration for your master worker.
   *
   * You should only set `masterConfig.acceleratorConfig` if `masterType` is set to a Compute Engine
   * machine type. Learn about [restrictions on accelerator configurations for training.](/ml-
   * engine/docs/tensorflow/using-gpus#compute-engine-machine-types-with-gpu)
   *
   * Set `masterConfig.imageUri` only if you build a custom image. Only one of
   * `masterConfig.imageUri` and `runtimeVersion` should be set. Learn more about [configuring
   * custom containers](/ml-engine/docs/distributed-training-containers).
   * @param masterConfig masterConfig or {@code null} for none
   * @return this {@code GoogleCloudMlV1TrainingInput}, for call chaining
   */
  public GoogleCloudMlV1TrainingInput setMasterConfig(GoogleCloudMlV1ReplicaConfig masterConfig) {
    this.masterConfig = masterConfig;
    return this;
  }
/**
* Optional. Specifies the type of virtual machine to use for your training job's master worker.
*
* The following types are supported:
*
* standard A basic machine configuration suitable for training simple models with small
* to moderate datasets. large_model A machine with a lot of memory, specially suited
* for parameter servers when your model is large (having many hidden layers or layers with very
* large numbers of nodes). complex_model_s A machine suitable for the master and
* workers of the cluster when your model requires more computation than the standard machine
* can handle satisfactorily. complex_model_m A machine with roughly twice the number
* of cores and roughly double the memory of complex_model_s. complex_model_l A
* machine with roughly twice the number of cores and roughly double the memory of
* complex_model_m. standard_gpu A machine equivalent to standard that also includes a
* single NVIDIA Tesla K80 GPU. See more about using GPUs to train your model.
* complex_model_m_gpu A machine equivalent to complex_model_m that also includes four
* NVIDIA Tesla K80 GPUs. complex_model_l_gpu A machine equivalent to complex_model_l
* that also includes eight NVIDIA Tesla K80 GPUs. standard_p100 A machine equivalent
* to standard that also includes a single NVIDIA Tesla P100 GPU. complex_model_m_p100
* A machine equivalent to complex_model_m that also includes four NVIDIA Tesla P100 GPUs.
* standard_v100 A machine equivalent to standard that also includes a single NVIDIA Tesla
* V100 GPU. large_model_v100 A machine equivalent to large_model that also includes a
* single NVIDIA Tesla V100 GPU. complex_model_m_v100 A machine equivalent to
* complex_model_m that also includes four NVIDIA Tesla V100 GPUs. complex_model_l_v100
* A machine equivalent to complex_model_l that also includes eight NVIDIA Tesla V100 GPUs.
* cloud_tpu A TPU VM including one Cloud TPU. See more about using TPUs to train your
* model.
*
* You may also use certain Compute Engine machine types directly in this field. The following
* types are supported:
*
* - `n1-standard-4` - `n1-standard-8` - `n1-standard-16` - `n1-standard-32` - `n1-standard-64` -
* `n1-standard-96` - `n1-highmem-2` - `n1-highmem-4` - `n1-highmem-8` - `n1-highmem-16` -
* `n1-highmem-32` - `n1-highmem-64` - `n1-highmem-96` - `n1-highcpu-16` - `n1-highcpu-32` -
* `n1-highcpu-64` - `n1-highcpu-96`
*
* See more about [using Compute Engine machine types](/ml-engine/docs/tensorflow/machine-types
* #compute-engine-machine-types).
*
* You must set this value when `scaleTier` is set to `CUSTOM`.
* @return value or {@code null} for none
*/
public java.lang.String getMasterType() {
return masterType;
}
/**
* Optional. Specifies the type of virtual machine to use for your training job's master worker.
*
* The following types are supported:
*
* standard A basic machine configuration suitable for training simple models with small
* to moderate datasets. large_model A machine with a lot of memory, specially suited
* for parameter servers when your model is large (having many hidden layers or layers with very
* large numbers of nodes). complex_model_s A machine suitable for the master and
* workers of the cluster when your model requires more computation than the standard machine
* can handle satisfactorily. complex_model_m A machine with roughly twice the number
* of cores and roughly double the memory of complex_model_s. complex_model_l A
* machine with roughly twice the number of cores and roughly double the memory of
* complex_model_m. standard_gpu A machine equivalent to standard that also includes a
* single NVIDIA Tesla K80 GPU. See more about using GPUs to train your model.
* complex_model_m_gpu A machine equivalent to complex_model_m that also includes four
* NVIDIA Tesla K80 GPUs. complex_model_l_gpu A machine equivalent to complex_model_l
* that also includes eight NVIDIA Tesla K80 GPUs. standard_p100 A machine equivalent
* to standard that also includes a single NVIDIA Tesla P100 GPU. complex_model_m_p100
* A machine equivalent to complex_model_m that also includes four NVIDIA Tesla P100 GPUs.
* standard_v100 A machine equivalent to standard that also includes a single NVIDIA Tesla
* V100 GPU. large_model_v100 A machine equivalent to large_model that also includes a
* single NVIDIA Tesla V100 GPU. complex_model_m_v100 A machine equivalent to
* complex_model_m that also includes four NVIDIA Tesla V100 GPUs. complex_model_l_v100
* A machine equivalent to complex_model_l that also includes eight NVIDIA Tesla V100 GPUs.
* cloud_tpu A TPU VM including one Cloud TPU. See more about using TPUs to train your
* model.
*
* You may also use certain Compute Engine machine types directly in this field. The following
* types are supported:
*
* - `n1-standard-4` - `n1-standard-8` - `n1-standard-16` - `n1-standard-32` - `n1-standard-64` -
* `n1-standard-96` - `n1-highmem-2` - `n1-highmem-4` - `n1-highmem-8` - `n1-highmem-16` -
* `n1-highmem-32` - `n1-highmem-64` - `n1-highmem-96` - `n1-highcpu-16` - `n1-highcpu-32` -
* `n1-highcpu-64` - `n1-highcpu-96`
*
* See more about [using Compute Engine machine types](/ml-engine/docs/tensorflow/machine-types
* #compute-engine-machine-types).
*
* You must set this value when `scaleTier` is set to `CUSTOM`.
* @param masterType masterType or {@code null} for none
*/
public GoogleCloudMlV1TrainingInput setMasterType(java.lang.String masterType) {
this.masterType = masterType;
return this;
}
/**
* Optional. The maximum job running time. The default is 7 days.
* @return value or {@code null} for none
*/
public String getMaxRunningTime() {
return maxRunningTime;
}
/**
* Optional. The maximum job running time. The default is 7 days.
* @param maxRunningTime maxRunningTime or {@code null} for none
*/
public GoogleCloudMlV1TrainingInput setMaxRunningTime(String maxRunningTime) {
this.maxRunningTime = maxRunningTime;
return this;
}
  /**
   * Required. The Google Cloud Storage location of the packages with the training program and any
   * additional dependencies. The maximum number of package URIs is 100.
   * @return value or {@code null} for none
   */
  public java.util.List<java.lang.String> getPackageUris() {
    return packageUris;
  }
  /**
   * Required. The Google Cloud Storage location of the packages with the training program and any
   * additional dependencies. The maximum number of package URIs is 100.
   * @param packageUris packageUris or {@code null} for none
   * @return this {@code GoogleCloudMlV1TrainingInput}, for call chaining
   */
  public GoogleCloudMlV1TrainingInput setPackageUris(java.util.List<java.lang.String> packageUris) {
    this.packageUris = packageUris;
    return this;
  }
  /**
   * Optional. The configuration for parameter servers.
   *
   * You should only set `parameterServerConfig.acceleratorConfig` if `parameterServerConfigType` is
   * set to a Compute Engine machine type. [Learn about restrictions on accelerator configurations
   * for training.](/ml-engine/docs/tensorflow/using-gpus#compute-engine-machine-types-with-gpu)
   *
   * Set `parameterServerConfig.imageUri` only if you build a custom image for your parameter
   * server. If `parameterServerConfig.imageUri` has not been set, AI Platform uses the value of
   * `masterConfig.imageUri`. Learn more about [configuring custom containers](/ml-engine/docs
   * /distributed-training-containers).
   * @return value or {@code null} for none
   */
  public GoogleCloudMlV1ReplicaConfig getParameterServerConfig() {
    return parameterServerConfig;
  }
  /**
   * Optional. The configuration for parameter servers.
   *
   * You should only set `parameterServerConfig.acceleratorConfig` if `parameterServerConfigType` is
   * set to a Compute Engine machine type. [Learn about restrictions on accelerator configurations
   * for training.](/ml-engine/docs/tensorflow/using-gpus#compute-engine-machine-types-with-gpu)
   *
   * Set `parameterServerConfig.imageUri` only if you build a custom image for your parameter
   * server. If `parameterServerConfig.imageUri` has not been set, AI Platform uses the value of
   * `masterConfig.imageUri`. Learn more about [configuring custom containers](/ml-engine/docs
   * /distributed-training-containers).
   * @param parameterServerConfig parameterServerConfig or {@code null} for none
   * @return this {@code GoogleCloudMlV1TrainingInput}, for call chaining
   */
  public GoogleCloudMlV1TrainingInput setParameterServerConfig(GoogleCloudMlV1ReplicaConfig parameterServerConfig) {
    this.parameterServerConfig = parameterServerConfig;
    return this;
  }
  /**
   * Optional. The number of parameter server replicas to use for the training job. Each replica in
   * the cluster will be of the type specified in `parameter_server_type`.
   *
   * This value can only be used when `scale_tier` is set to `CUSTOM`.If you set this value, you
   * must also set `parameter_server_type`.
   *
   * The default value is zero.
   * @return value or {@code null} for none
   */
  public java.lang.Long getParameterServerCount() {
    return parameterServerCount;
  }
  /**
   * Optional. The number of parameter server replicas to use for the training job. Each replica in
   * the cluster will be of the type specified in `parameter_server_type`.
   *
   * This value can only be used when `scale_tier` is set to `CUSTOM`.If you set this value, you
   * must also set `parameter_server_type`.
   *
   * The default value is zero.
   * @param parameterServerCount parameterServerCount or {@code null} for none
   * @return this {@code GoogleCloudMlV1TrainingInput}, for call chaining
   */
  public GoogleCloudMlV1TrainingInput setParameterServerCount(java.lang.Long parameterServerCount) {
    this.parameterServerCount = parameterServerCount;
    return this;
  }
  /**
   * Optional. Specifies the type of virtual machine to use for your training job's parameter
   * server.
   *
   * The supported values are the same as those described in the entry for `master_type`.
   *
   * This value must be consistent with the category of machine type that `masterType` uses. In
   * other words, both must be AI Platform machine types or both must be Compute Engine machine
   * types.
   *
   * This value must be present when `scaleTier` is set to `CUSTOM` and `parameter_server_count` is
   * greater than zero.
   * @return value or {@code null} for none
   */
  public java.lang.String getParameterServerType() {
    return parameterServerType;
  }
  /**
   * Optional. Specifies the type of virtual machine to use for your training job's parameter
   * server.
   *
   * The supported values are the same as those described in the entry for `master_type`.
   *
   * This value must be consistent with the category of machine type that `masterType` uses. In
   * other words, both must be AI Platform machine types or both must be Compute Engine machine
   * types.
   *
   * This value must be present when `scaleTier` is set to `CUSTOM` and `parameter_server_count` is
   * greater than zero.
   * @param parameterServerType parameterServerType or {@code null} for none
   * @return this {@code GoogleCloudMlV1TrainingInput}, for call chaining
   */
  public GoogleCloudMlV1TrainingInput setParameterServerType(java.lang.String parameterServerType) {
    this.parameterServerType = parameterServerType;
    return this;
  }
  /**
   * Required. The Python module name to run after installing the packages.
   * @return value or {@code null} for none
   */
  public java.lang.String getPythonModule() {
    return pythonModule;
  }
  /**
   * Required. The Python module name to run after installing the packages.
   * @param pythonModule pythonModule or {@code null} for none
   * @return this {@code GoogleCloudMlV1TrainingInput}, for call chaining
   */
  public GoogleCloudMlV1TrainingInput setPythonModule(java.lang.String pythonModule) {
    this.pythonModule = pythonModule;
    return this;
  }
  /**
   * Optional. The version of Python used in training. If not set, the default version is '2.7'.
   * Python '3.5' is available when `runtime_version` is set to '1.4' and above. Python '2.7' works
   * with all supported runtime versions.
   * @return value or {@code null} for none
   */
  public java.lang.String getPythonVersion() {
    return pythonVersion;
  }
  /**
   * Optional. The version of Python used in training. If not set, the default version is '2.7'.
   * Python '3.5' is available when `runtime_version` is set to '1.4' and above. Python '2.7' works
   * with all supported runtime versions.
   * @param pythonVersion pythonVersion or {@code null} for none
   * @return this {@code GoogleCloudMlV1TrainingInput}, for call chaining
   */
  public GoogleCloudMlV1TrainingInput setPythonVersion(java.lang.String pythonVersion) {
    this.pythonVersion = pythonVersion;
    return this;
  }
  /**
   * Required. The Google Compute Engine region to run the training job in. See the available
   * regions for AI Platform services.
   * @return value or {@code null} for none
   */
  public java.lang.String getRegion() {
    return region;
  }
  /**
   * Required. The Google Compute Engine region to run the training job in. See the available
   * regions for AI Platform services.
   * @param region region or {@code null} for none
   * @return this {@code GoogleCloudMlV1TrainingInput}, for call chaining
   */
  public GoogleCloudMlV1TrainingInput setRegion(java.lang.String region) {
    this.region = region;
    return this;
  }
  /**
   * Optional. The AI Platform runtime version to use for training. If not set, AI Platform uses the
   * default stable version, 1.0. For more information, see the runtime version list and how to
   * manage runtime versions.
   * @return value or {@code null} for none
   */
  public java.lang.String getRuntimeVersion() {
    return runtimeVersion;
  }
  /**
   * Optional. The AI Platform runtime version to use for training. If not set, AI Platform uses the
   * default stable version, 1.0. For more information, see the runtime version list and how to
   * manage runtime versions.
   * @param runtimeVersion runtimeVersion or {@code null} for none
   * @return this {@code GoogleCloudMlV1TrainingInput}, for call chaining
   */
  public GoogleCloudMlV1TrainingInput setRuntimeVersion(java.lang.String runtimeVersion) {
    this.runtimeVersion = runtimeVersion;
    return this;
  }
  /**
   * Required. Specifies the machine types, the number of replicas for workers and parameter
   * servers.
   * @return value or {@code null} for none
   */
  public java.lang.String getScaleTier() {
    return scaleTier;
  }
  /**
   * Required. Specifies the machine types, the number of replicas for workers and parameter
   * servers.
   * @param scaleTier scaleTier or {@code null} for none
   * @return this {@code GoogleCloudMlV1TrainingInput}, for call chaining
   */
  public GoogleCloudMlV1TrainingInput setScaleTier(java.lang.String scaleTier) {
    this.scaleTier = scaleTier;
    return this;
  }
  /**
   * Optional. The configuration for workers.
   *
   * You should only set `workerConfig.acceleratorConfig` if `workerType` is set to a Compute Engine
   * machine type. [Learn about restrictions on accelerator configurations for training.](/ml-
   * engine/docs/tensorflow/using-gpus#compute-engine-machine-types-with-gpu)
   *
   * Set `workerConfig.imageUri` only if you build a custom image for your worker. If
   * `workerConfig.imageUri` has not been set, AI Platform uses the value of
   * `masterConfig.imageUri`. Learn more about [configuring custom containers](/ml-engine/docs
   * /distributed-training-containers).
   * @return value or {@code null} for none
   */
  public GoogleCloudMlV1ReplicaConfig getWorkerConfig() {
    return workerConfig;
  }
  /**
   * Optional. The configuration for workers.
   *
   * You should only set `workerConfig.acceleratorConfig` if `workerType` is set to a Compute Engine
   * machine type. [Learn about restrictions on accelerator configurations for training.](/ml-
   * engine/docs/tensorflow/using-gpus#compute-engine-machine-types-with-gpu)
   *
   * Set `workerConfig.imageUri` only if you build a custom image for your worker. If
   * `workerConfig.imageUri` has not been set, AI Platform uses the value of
   * `masterConfig.imageUri`. Learn more about [configuring custom containers](/ml-engine/docs
   * /distributed-training-containers).
   * @param workerConfig workerConfig or {@code null} for none
   * @return this {@code GoogleCloudMlV1TrainingInput}, for call chaining
   */
  public GoogleCloudMlV1TrainingInput setWorkerConfig(GoogleCloudMlV1ReplicaConfig workerConfig) {
    this.workerConfig = workerConfig;
    return this;
  }
  /**
   * Optional. The number of worker replicas to use for the training job. Each replica in the
   * cluster will be of the type specified in `worker_type`.
   *
   * This value can only be used when `scale_tier` is set to `CUSTOM`. If you set this value, you
   * must also set `worker_type`.
   *
   * The default value is zero.
   * @return value or {@code null} for none
   */
  public java.lang.Long getWorkerCount() {
    return workerCount;
  }
  /**
   * Optional. The number of worker replicas to use for the training job. Each replica in the
   * cluster will be of the type specified in `worker_type`.
   *
   * This value can only be used when `scale_tier` is set to `CUSTOM`. If you set this value, you
   * must also set `worker_type`.
   *
   * The default value is zero.
   * @param workerCount workerCount or {@code null} for none
   * @return this {@code GoogleCloudMlV1TrainingInput}, for call chaining
   */
  public GoogleCloudMlV1TrainingInput setWorkerCount(java.lang.Long workerCount) {
    this.workerCount = workerCount;
    return this;
  }
  /**
   * Optional. Specifies the type of virtual machine to use for your training job's worker nodes.
   *
   * The supported values are the same as those described in the entry for `masterType`.
   *
   * This value must be consistent with the category of machine type that `masterType` uses. In
   * other words, both must be AI Platform machine types or both must be Compute Engine machine
   * types.
   *
   * If you use `cloud_tpu` for this value, see special instructions for [configuring a custom TPU
   * machine](/ml-engine/docs/tensorflow/using-tpus#configuring_a_custom_tpu_machine).
   *
   * This value must be present when `scaleTier` is set to `CUSTOM` and `workerCount` is greater
   * than zero.
   * @return value or {@code null} for none
   */
  public java.lang.String getWorkerType() {
    return workerType;
  }
  /**
   * Optional. Specifies the type of virtual machine to use for your training job's worker nodes.
   *
   * The supported values are the same as those described in the entry for `masterType`.
   *
   * This value must be consistent with the category of machine type that `masterType` uses. In
   * other words, both must be AI Platform machine types or both must be Compute Engine machine
   * types.
   *
   * If you use `cloud_tpu` for this value, see special instructions for [configuring a custom TPU
   * machine](/ml-engine/docs/tensorflow/using-tpus#configuring_a_custom_tpu_machine).
   *
   * This value must be present when `scaleTier` is set to `CUSTOM` and `workerCount` is greater
   * than zero.
   * @param workerType workerType or {@code null} for none
   * @return this {@code GoogleCloudMlV1TrainingInput}, for call chaining
   */
  public GoogleCloudMlV1TrainingInput setWorkerType(java.lang.String workerType) {
    this.workerType = workerType;
    return this;
  }
  /** Covariant override so generic {@code set(field, value)} calls keep the concrete type for chaining. */
  @Override
  public GoogleCloudMlV1TrainingInput set(String fieldName, Object value) {
    return (GoogleCloudMlV1TrainingInput) super.set(fieldName, value);
  }
  /** Covariant override so {@code clone()} returns the concrete type. */
  @Override
  public GoogleCloudMlV1TrainingInput clone() {
    return (GoogleCloudMlV1TrainingInput) super.clone();
  }
}
|
googleapis/google-cloud-java | 34,916 | java-translate/proto-google-cloud-translate-v3/src/main/java/com/google/cloud/translate/v3/ListModelsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/translate/v3/automl_translation.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.translate.v3;
/**
*
*
* <pre>
* Response message for ListModels.
* </pre>
*
* Protobuf type {@code google.cloud.translation.v3.ListModelsResponse}
*/
public final class ListModelsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.translation.v3.ListModelsResponse)
ListModelsResponseOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ListModelsResponse.newBuilder() to construct.
  private ListModelsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor; initializes fields to their proto3 defaults (empty list / empty string).
  private ListModelsResponse() {
    models_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
  // Factory hook the protobuf runtime uses to create fresh instances during parsing.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListModelsResponse();
  }
  // Returns the reflection descriptor for this message type.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.translate.v3.AutoMLTranslationProto
        .internal_static_google_cloud_translation_v3_ListModelsResponse_descriptor;
  }
  // Maps descriptor fields to this class's accessors for the generated-message runtime.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.translate.v3.AutoMLTranslationProto
        .internal_static_google_cloud_translation_v3_ListModelsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.translate.v3.ListModelsResponse.class,
            com.google.cloud.translate.v3.ListModelsResponse.Builder.class);
  }
public static final int MODELS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.translate.v3.Model> models_;
/**
*
*
* <pre>
* The models read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Model models = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.translate.v3.Model> getModelsList() {
return models_;
}
/**
*
*
* <pre>
* The models read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Model models = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.translate.v3.ModelOrBuilder>
getModelsOrBuilderList() {
return models_;
}
/**
*
*
* <pre>
* The models read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Model models = 1;</code>
*/
@java.lang.Override
public int getModelsCount() {
return models_.size();
}
/**
*
*
* <pre>
* The models read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Model models = 1;</code>
*/
@java.lang.Override
public com.google.cloud.translate.v3.Model getModels(int index) {
return models_.get(index);
}
/**
*
*
* <pre>
* The models read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Model models = 1;</code>
*/
@java.lang.Override
public com.google.cloud.translate.v3.ModelOrBuilder getModelsOrBuilder(int index) {
return models_.get(index);
}
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;

  // Holds either a java.lang.String or a com.google.protobuf.ByteString; decoded lazily below.
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";

  /**
   *
   *
   * <pre>
   * A token to retrieve next page of results.
   * Pass this token to the page_token field in the ListModelsRequest to
   * obtain the corresponding page.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so subsequent calls return it without re-decoding.
      nextPageToken_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * A token to retrieve next page of results.
   * Pass this token to the page_token field in the ListModelsRequest to
   * obtain the corresponding page.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the encoded ByteString so subsequent calls return it without re-encoding.
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
// Cached initialization state: -1 = not yet computed, 0 = known uninitialized,
// 1 = known initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// No field checks are performed for this message, so the result is always true;
// memoize it so later calls take the fast path above.
memoizedIsInitialized = 1;
return true;
}
/**
* Serializes this message to the wire: each element of {@code models} as field 1,
* {@code next_page_token} as field 2 only when non-empty, then any unknown fields.
*/
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < models_.size(); i++) {
output.writeMessage(1, models_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
/**
* Computes the serialized size in bytes, mirroring {@link #writeTo} field by field;
* the result is cached in {@code memoizedSize} after the first call.
*/
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < models_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, models_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
/**
* Field-by-field equality: models list, next_page_token, and unknown fields must all
* match. Non-ListModelsResponse arguments defer to the superclass implementation.
*/
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.translate.v3.ListModelsResponse)) {
return super.equals(obj);
}
com.google.cloud.translate.v3.ListModelsResponse other =
(com.google.cloud.translate.v3.ListModelsResponse) obj;
if (!getModelsList().equals(other.getModelsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
/**
* Hash consistent with {@link #equals}: mixes the descriptor, each populated field
* (tagged by its field number), and unknown fields. Memoized after the first call.
*/
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getModelsCount() > 0) {
hash = (37 * hash) + MODELS_FIELD_NUMBER;
hash = (53 * hash) + getModelsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parse entry points. In-memory inputs (ByteBuffer, ByteString,
// byte[]) delegate to PARSER directly; stream-based inputs go through the
// GeneratedMessageV3 helpers, which translate IOExceptions appropriately.
public static com.google.cloud.translate.v3.ListModelsResponse parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.translate.v3.ListModelsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.translate.v3.ListModelsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.translate.v3.ListModelsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.translate.v3.ListModelsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.translate.v3.ListModelsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.translate.v3.ListModelsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.translate.v3.ListModelsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Delimited variants first read a varint length prefix, then that many bytes.
public static com.google.cloud.translate.v3.ListModelsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.translate.v3.ListModelsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.translate.v3.ListModelsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.translate.v3.ListModelsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Builder factory methods: newBuilder() starts from the default instance, and
// toBuilder() skips the redundant merge when invoked on the default instance itself.
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.translate.v3.ListModelsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
// Used by the runtime to create a builder parented to an enclosing builder.
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for ListModels.
* </pre>
*
* Protobuf type {@code google.cloud.translation.v3.ListModelsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.translation.v3.ListModelsResponse)
com.google.cloud.translate.v3.ListModelsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.translate.v3.AutoMLTranslationProto
.internal_static_google_cloud_translation_v3_ListModelsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.translate.v3.AutoMLTranslationProto
.internal_static_google_cloud_translation_v3_ListModelsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.translate.v3.ListModelsResponse.class,
com.google.cloud.translate.v3.ListModelsResponse.Builder.class);
}
// Construct using com.google.cloud.translate.v3.ListModelsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
/** Resets all fields to their defaults: empties the models list and clears the token. */
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (modelsBuilder_ == null) {
models_ = java.util.Collections.emptyList();
} else {
// Field-builder mode: the builder owns the list; null ours and clear it there.
models_ = null;
modelsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.translate.v3.AutoMLTranslationProto
.internal_static_google_cloud_translation_v3_ListModelsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.translate.v3.ListModelsResponse getDefaultInstanceForType() {
return com.google.cloud.translate.v3.ListModelsResponse.getDefaultInstance();
}
/** Builds the message, throwing if the result reports itself uninitialized. */
@java.lang.Override
public com.google.cloud.translate.v3.ListModelsResponse build() {
com.google.cloud.translate.v3.ListModelsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
/** Builds the message without the initialization check. */
@java.lang.Override
public com.google.cloud.translate.v3.ListModelsResponse buildPartial() {
com.google.cloud.translate.v3.ListModelsResponse result =
new com.google.cloud.translate.v3.ListModelsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Transfers the repeated `models` field: freezes the plain list (clearing the
// mutable bit so this builder copies before further writes) or builds from the
// field builder when one exists.
private void buildPartialRepeatedFields(
com.google.cloud.translate.v3.ListModelsResponse result) {
if (modelsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
models_ = java.util.Collections.unmodifiableList(models_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.models_ = models_;
} else {
result.models_ = modelsBuilder_.build();
}
}
// Transfers scalar fields guarded by presence bits (0x2 = next_page_token).
private void buildPartial0(com.google.cloud.translate.v3.ListModelsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.translate.v3.ListModelsResponse) {
return mergeFrom((com.google.cloud.translate.v3.ListModelsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges all populated fields of `other` into this builder: the models list is
// concatenated; next_page_token is overwritten when non-empty in `other`.
public Builder mergeFrom(com.google.cloud.translate.v3.ListModelsResponse other) {
if (other == com.google.cloud.translate.v3.ListModelsResponse.getDefaultInstance())
return this;
if (modelsBuilder_ == null) {
// Plain-list mode: adopt other's (immutable) list when ours is empty so no copy
// is made until a mutation occurs; otherwise copy-on-write and append.
if (!other.models_.isEmpty()) {
if (models_.isEmpty()) {
models_ = other.models_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureModelsIsMutable();
models_.addAll(other.models_);
}
onChanged();
}
} else {
// Field-builder mode: if the builder is empty, discard it and adopt other's list
// directly, re-creating the builder only when alwaysUseFieldBuilders is set.
if (!other.models_.isEmpty()) {
if (modelsBuilder_.isEmpty()) {
modelsBuilder_.dispose();
modelsBuilder_ = null;
models_ = other.models_;
bitField0_ = (bitField0_ & ~0x00000001);
modelsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getModelsFieldBuilder()
: null;
} else {
modelsBuilder_.addAllMessages(other.models_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
// This message type performs no required-field checks.
return true;
}
// Reads fields from the stream into this builder, dispatching on the wire tag:
// tag 0 = end of stream, tag 10 = field 1 (models, length-delimited message),
// tag 18 = field 2 (next_page_token, UTF-8 string); anything else is preserved
// as an unknown field.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.translate.v3.Model m =
input.readMessage(
com.google.cloud.translate.v3.Model.parser(), extensionRegistry);
if (modelsBuilder_ == null) {
ensureModelsIsMutable();
models_.add(m);
} else {
modelsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
// Fields consumed before a failure remain in the builder; notify listeners.
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.translate.v3.Model> models_ =
java.util.Collections.emptyList();
private void ensureModelsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
models_ = new java.util.ArrayList<com.google.cloud.translate.v3.Model>(models_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.translate.v3.Model,
com.google.cloud.translate.v3.Model.Builder,
com.google.cloud.translate.v3.ModelOrBuilder>
modelsBuilder_;
/**
*
*
* <pre>
* The models read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Model models = 1;</code>
*/
public java.util.List<com.google.cloud.translate.v3.Model> getModelsList() {
if (modelsBuilder_ == null) {
return java.util.Collections.unmodifiableList(models_);
} else {
return modelsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The models read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Model models = 1;</code>
*/
public int getModelsCount() {
if (modelsBuilder_ == null) {
return models_.size();
} else {
return modelsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The models read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Model models = 1;</code>
*/
public com.google.cloud.translate.v3.Model getModels(int index) {
if (modelsBuilder_ == null) {
return models_.get(index);
} else {
return modelsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The models read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Model models = 1;</code>
*/
public Builder setModels(int index, com.google.cloud.translate.v3.Model value) {
if (modelsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureModelsIsMutable();
models_.set(index, value);
onChanged();
} else {
modelsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The models read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Model models = 1;</code>
*/
public Builder setModels(
int index, com.google.cloud.translate.v3.Model.Builder builderForValue) {
if (modelsBuilder_ == null) {
ensureModelsIsMutable();
models_.set(index, builderForValue.build());
onChanged();
} else {
modelsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The models read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Model models = 1;</code>
*/
public Builder addModels(com.google.cloud.translate.v3.Model value) {
if (modelsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureModelsIsMutable();
models_.add(value);
onChanged();
} else {
modelsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The models read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Model models = 1;</code>
*/
public Builder addModels(int index, com.google.cloud.translate.v3.Model value) {
if (modelsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureModelsIsMutable();
models_.add(index, value);
onChanged();
} else {
modelsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The models read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Model models = 1;</code>
*/
public Builder addModels(com.google.cloud.translate.v3.Model.Builder builderForValue) {
if (modelsBuilder_ == null) {
ensureModelsIsMutable();
models_.add(builderForValue.build());
onChanged();
} else {
modelsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The models read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Model models = 1;</code>
*/
public Builder addModels(
int index, com.google.cloud.translate.v3.Model.Builder builderForValue) {
if (modelsBuilder_ == null) {
ensureModelsIsMutable();
models_.add(index, builderForValue.build());
onChanged();
} else {
modelsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The models read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Model models = 1;</code>
*/
public Builder addAllModels(
java.lang.Iterable<? extends com.google.cloud.translate.v3.Model> values) {
if (modelsBuilder_ == null) {
ensureModelsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, models_);
onChanged();
} else {
modelsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The models read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Model models = 1;</code>
*/
public Builder clearModels() {
if (modelsBuilder_ == null) {
models_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
modelsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The models read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Model models = 1;</code>
*/
public Builder removeModels(int index) {
if (modelsBuilder_ == null) {
ensureModelsIsMutable();
models_.remove(index);
onChanged();
} else {
modelsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The models read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Model models = 1;</code>
*/
public com.google.cloud.translate.v3.Model.Builder getModelsBuilder(int index) {
return getModelsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The models read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Model models = 1;</code>
*/
public com.google.cloud.translate.v3.ModelOrBuilder getModelsOrBuilder(int index) {
if (modelsBuilder_ == null) {
return models_.get(index);
} else {
return modelsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The models read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Model models = 1;</code>
*/
public java.util.List<? extends com.google.cloud.translate.v3.ModelOrBuilder>
getModelsOrBuilderList() {
if (modelsBuilder_ != null) {
return modelsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(models_);
}
}
/**
*
*
* <pre>
* The models read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Model models = 1;</code>
*/
public com.google.cloud.translate.v3.Model.Builder addModelsBuilder() {
return getModelsFieldBuilder()
.addBuilder(com.google.cloud.translate.v3.Model.getDefaultInstance());
}
/**
*
*
* <pre>
* The models read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Model models = 1;</code>
*/
public com.google.cloud.translate.v3.Model.Builder addModelsBuilder(int index) {
return getModelsFieldBuilder()
.addBuilder(index, com.google.cloud.translate.v3.Model.getDefaultInstance());
}
/**
*
*
* <pre>
* The models read.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Model models = 1;</code>
*/
public java.util.List<com.google.cloud.translate.v3.Model.Builder> getModelsBuilderList() {
return getModelsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.translate.v3.Model,
com.google.cloud.translate.v3.Model.Builder,
com.google.cloud.translate.v3.ModelOrBuilder>
getModelsFieldBuilder() {
if (modelsBuilder_ == null) {
modelsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.translate.v3.Model,
com.google.cloud.translate.v3.Model.Builder,
com.google.cloud.translate.v3.ModelOrBuilder>(
models_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
models_ = null;
}
return modelsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token to retrieve next page of results.
* Pass this token to the page_token field in the ListModelsRequest to
* obtain the corresponding page.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token to retrieve next page of results.
* Pass this token to the page_token field in the ListModelsRequest to
* obtain the corresponding page.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token to retrieve next page of results.
* Pass this token to the page_token field in the ListModelsRequest to
* obtain the corresponding page.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token to retrieve next page of results.
* Pass this token to the page_token field in the ListModelsRequest to
* obtain the corresponding page.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token to retrieve next page of results.
* Pass this token to the page_token field in the ListModelsRequest to
* obtain the corresponding page.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.translation.v3.ListModelsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.translation.v3.ListModelsResponse)
// Singleton all-defaults instance; also serves as the no-op identity in mergeFrom.
private static final com.google.cloud.translate.v3.ListModelsResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.translate.v3.ListModelsResponse();
}
public static com.google.cloud.translate.v3.ListModelsResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Parser that delegates to Builder.mergeFrom and, on failure, attaches the
// partially-parsed message via setUnfinishedMessage so callers can inspect it.
private static final com.google.protobuf.Parser<ListModelsResponse> PARSER =
new com.google.protobuf.AbstractParser<ListModelsResponse>() {
@java.lang.Override
public ListModelsResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListModelsResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListModelsResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.translate.v3.ListModelsResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 34,999 | java-artifact-registry/proto-google-cloud-artifact-registry-v1beta2/src/main/java/com/google/devtools/artifactregistry/v1beta2/UpdateRepositoryRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/devtools/artifactregistry/v1beta2/repository.proto
// Protobuf Java Version: 3.25.8
package com.google.devtools.artifactregistry.v1beta2;
/**
*
*
* <pre>
* The request to update a repository.
* </pre>
*
* Protobuf type {@code google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest}
*/
public final class UpdateRepositoryRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest)
UpdateRepositoryRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateRepositoryRequest.newBuilder() to construct.
private UpdateRepositoryRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Private: instances are created only via the builder or the parser.
private UpdateRepositoryRequest() {}
// Invoked reflectively by the protobuf runtime to create fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateRepositoryRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.devtools.artifactregistry.v1beta2.RepositoryProto
.internal_static_google_devtools_artifactregistry_v1beta2_UpdateRepositoryRequest_descriptor;
}
// Binds reflective field access to this message and its Builder class.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.devtools.artifactregistry.v1beta2.RepositoryProto
.internal_static_google_devtools_artifactregistry_v1beta2_UpdateRepositoryRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest.class,
com.google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest.Builder.class);
}
// Presence bits: 0x1 = repository set, 0x2 = update_mask set (see has* methods below).
private int bitField0_;
public static final int REPOSITORY_FIELD_NUMBER = 1;
private com.google.devtools.artifactregistry.v1beta2.Repository repository_;
/**
*
*
* <pre>
* The repository that replaces the resource on the server.
* </pre>
*
* <code>.google.devtools.artifactregistry.v1beta2.Repository repository = 1;</code>
*
* @return Whether the repository field is set.
*/
@java.lang.Override
public boolean hasRepository() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* The repository that replaces the resource on the server.
* </pre>
*
* <code>.google.devtools.artifactregistry.v1beta2.Repository repository = 1;</code>
*
* @return The repository.
*/
@java.lang.Override
public com.google.devtools.artifactregistry.v1beta2.Repository getRepository() {
// Never returns null: falls back to the default instance when unset.
return repository_ == null
? com.google.devtools.artifactregistry.v1beta2.Repository.getDefaultInstance()
: repository_;
}
/**
*
*
* <pre>
* The repository that replaces the resource on the server.
* </pre>
*
* <code>.google.devtools.artifactregistry.v1beta2.Repository repository = 1;</code>
*/
@java.lang.Override
public com.google.devtools.artifactregistry.v1beta2.RepositoryOrBuilder getRepositoryOrBuilder() {
return repository_ == null
? com.google.devtools.artifactregistry.v1beta2.Repository.getDefaultInstance()
: repository_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
// Backing field for update_mask; presence tracked by bitField0_ bit 0x2.
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
// Never returns null: falls back to the default instance when unset.
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
// Cached initialization state: -1 = not computed, 0 = uninitialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// No field checks are performed for this message, so the result is always true.
memoizedIsInitialized = 1;
return true;
}
/**
* Serializes to the wire: repository as field 1 and update_mask as field 2, each
* only when its presence bit is set, followed by unknown fields.
*/
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getRepository());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getUpdateMask());
}
getUnknownFields().writeTo(output);
}
/**
* Computes the serialized size in bytes, mirroring {@link #writeTo}; cached in
* {@code memoizedSize} after the first call.
*/
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getRepository());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
/**
* Field-by-field equality: presence and value of repository and update_mask, plus
* unknown fields, must all match.
*/
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest)) {
return super.equals(obj);
}
com.google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest other =
(com.google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest) obj;
if (hasRepository() != other.hasRepository()) return false;
if (hasRepository()) {
if (!getRepository().equals(other.getRepository())) return false;
}
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
/**
* Hash consistent with {@link #equals}: mixes the descriptor, each present field
* (tagged by its field number), and unknown fields. Memoized after the first call.
*/
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasRepository()) {
hash = (37 * hash) + REPOSITORY_FIELD_NUMBER;
hash = (53 * hash) + getRepository().hashCode();
}
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parse entry points: in-memory inputs delegate to PARSER;
// stream inputs go through the GeneratedMessageV3 IO-exception helpers.
public static com.google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  // Creates a builder initialized to default (all-unset) values.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  // Creates a builder pre-populated by merging from an existing message.
  public static Builder newBuilder(
      com.google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // Skip the merge when this is the shared default instance: an empty
    // builder is already equivalent and cheaper to create.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* The request to update a repository.
* </pre>
*
* Protobuf type {@code google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest)
com.google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.devtools.artifactregistry.v1beta2.RepositoryProto
.internal_static_google_devtools_artifactregistry_v1beta2_UpdateRepositoryRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.devtools.artifactregistry.v1beta2.RepositoryProto
.internal_static_google_devtools_artifactregistry_v1beta2_UpdateRepositoryRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest.class,
com.google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest.Builder.class);
}
// Construct using
// com.google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getRepositoryFieldBuilder();
getUpdateMaskFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
repository_ = null;
if (repositoryBuilder_ != null) {
repositoryBuilder_.dispose();
repositoryBuilder_ = null;
}
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.devtools.artifactregistry.v1beta2.RepositoryProto
.internal_static_google_devtools_artifactregistry_v1beta2_UpdateRepositoryRequest_descriptor;
}
@java.lang.Override
public com.google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest
getDefaultInstanceForType() {
return com.google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest
.getDefaultInstance();
}
@java.lang.Override
public com.google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest build() {
com.google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest buildPartial() {
com.google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest result =
new com.google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
    // Copies every explicitly-set field from the builder into the result
    // message, preferring a nested field builder's built message over the
    // raw field when one exists, and transfers the presence bits.
    private void buildPartial0(
        com.google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.repository_ = repositoryBuilder_ == null ? repository_ : repositoryBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest) {
return mergeFrom(
(com.google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest other) {
if (other
== com.google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest
.getDefaultInstance()) return this;
if (other.hasRepository()) {
mergeRepository(other.getRepository());
}
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    // Wire-format merge loop: reads tag/value pairs until EOF (tag 0) or an
    // end-group tag, dispatching known tags (10 = field 1 "repository",
    // 18 = field 2 "update_mask") to their field builders and routing
    // everything else to the unknown-field set.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(getRepositoryFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // onChanged runs even on failure so listeners see partial merges.
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private com.google.devtools.artifactregistry.v1beta2.Repository repository_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.devtools.artifactregistry.v1beta2.Repository,
com.google.devtools.artifactregistry.v1beta2.Repository.Builder,
com.google.devtools.artifactregistry.v1beta2.RepositoryOrBuilder>
repositoryBuilder_;
/**
*
*
* <pre>
* The repository that replaces the resource on the server.
* </pre>
*
* <code>.google.devtools.artifactregistry.v1beta2.Repository repository = 1;</code>
*
* @return Whether the repository field is set.
*/
public boolean hasRepository() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* The repository that replaces the resource on the server.
* </pre>
*
* <code>.google.devtools.artifactregistry.v1beta2.Repository repository = 1;</code>
*
* @return The repository.
*/
public com.google.devtools.artifactregistry.v1beta2.Repository getRepository() {
if (repositoryBuilder_ == null) {
return repository_ == null
? com.google.devtools.artifactregistry.v1beta2.Repository.getDefaultInstance()
: repository_;
} else {
return repositoryBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* The repository that replaces the resource on the server.
* </pre>
*
* <code>.google.devtools.artifactregistry.v1beta2.Repository repository = 1;</code>
*/
public Builder setRepository(com.google.devtools.artifactregistry.v1beta2.Repository value) {
if (repositoryBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
repository_ = value;
} else {
repositoryBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The repository that replaces the resource on the server.
* </pre>
*
* <code>.google.devtools.artifactregistry.v1beta2.Repository repository = 1;</code>
*/
public Builder setRepository(
com.google.devtools.artifactregistry.v1beta2.Repository.Builder builderForValue) {
if (repositoryBuilder_ == null) {
repository_ = builderForValue.build();
} else {
repositoryBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The repository that replaces the resource on the server.
* </pre>
*
* <code>.google.devtools.artifactregistry.v1beta2.Repository repository = 1;</code>
*/
public Builder mergeRepository(com.google.devtools.artifactregistry.v1beta2.Repository value) {
if (repositoryBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& repository_ != null
&& repository_
!= com.google.devtools.artifactregistry.v1beta2.Repository.getDefaultInstance()) {
getRepositoryBuilder().mergeFrom(value);
} else {
repository_ = value;
}
} else {
repositoryBuilder_.mergeFrom(value);
}
if (repository_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* The repository that replaces the resource on the server.
* </pre>
*
* <code>.google.devtools.artifactregistry.v1beta2.Repository repository = 1;</code>
*/
public Builder clearRepository() {
bitField0_ = (bitField0_ & ~0x00000001);
repository_ = null;
if (repositoryBuilder_ != null) {
repositoryBuilder_.dispose();
repositoryBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* The repository that replaces the resource on the server.
* </pre>
*
* <code>.google.devtools.artifactregistry.v1beta2.Repository repository = 1;</code>
*/
public com.google.devtools.artifactregistry.v1beta2.Repository.Builder getRepositoryBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getRepositoryFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* The repository that replaces the resource on the server.
* </pre>
*
* <code>.google.devtools.artifactregistry.v1beta2.Repository repository = 1;</code>
*/
public com.google.devtools.artifactregistry.v1beta2.RepositoryOrBuilder
getRepositoryOrBuilder() {
if (repositoryBuilder_ != null) {
return repositoryBuilder_.getMessageOrBuilder();
} else {
return repository_ == null
? com.google.devtools.artifactregistry.v1beta2.Repository.getDefaultInstance()
: repository_;
}
}
/**
*
*
* <pre>
* The repository that replaces the resource on the server.
* </pre>
*
* <code>.google.devtools.artifactregistry.v1beta2.Repository repository = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.devtools.artifactregistry.v1beta2.Repository,
com.google.devtools.artifactregistry.v1beta2.Repository.Builder,
com.google.devtools.artifactregistry.v1beta2.RepositoryOrBuilder>
getRepositoryFieldBuilder() {
if (repositoryBuilder_ == null) {
repositoryBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.devtools.artifactregistry.v1beta2.Repository,
com.google.devtools.artifactregistry.v1beta2.Repository.Builder,
com.google.devtools.artifactregistry.v1beta2.RepositoryOrBuilder>(
getRepository(), getParentForChildren(), isClean());
repository_ = null;
}
return repositoryBuilder_;
}
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& updateMask_ != null
&& updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
getUpdateMaskBuilder().mergeFrom(value);
} else {
updateMask_ = value;
}
} else {
updateMaskBuilder_.mergeFrom(value);
}
if (updateMask_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000002);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest)
}
// @@protoc_insertion_point(class_scope:google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest)
  // Shared immutable default (all fields unset) instance for this message type.
  private static final com.google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest();
  }

  public static com.google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser singleton used by all parseFrom entry points. It always returns a
  // partially-built message on failure via setUnfinishedMessage so callers can
  // inspect what was parsed before the error.
  private static final com.google.protobuf.Parser<UpdateRepositoryRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateRepositoryRequest>() {
        @java.lang.Override
        public UpdateRepositoryRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<UpdateRepositoryRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<UpdateRepositoryRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.devtools.artifactregistry.v1beta2.UpdateRepositoryRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/harmony | 35,145 | classlib/modules/awt/src/main/java/common/org/apache/harmony/awt/gl/font/TextRunSegmentImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author Oleg V. Khaschansky
*
*/
package org.apache.harmony.awt.gl.font;
import java.awt.*;
import java.awt.font.*;
import java.awt.geom.Rectangle2D;
import java.awt.geom.GeneralPath;
import java.awt.geom.AffineTransform;
import java.awt.geom.Point2D;
// XXX - TODO - bidi not implemented yet
//import java.text.Bidi;
import java.util.Arrays;
import org.apache.harmony.awt.internal.nls.Messages;
/**
* Date: Apr 25, 2005
* Time: 4:33:18 PM
*
* This class contains the implementation of the behavior of the
* text run segment with constant text attributes and direction.
*/
public class TextRunSegmentImpl {
    /**
     * This class contains basic information required for creation
     * of the glyph-based text run segment. It is a plain data holder:
     * the character array is shared with the caller, not copied.
     */
    public static class TextSegmentInfo {
        // XXX - TODO - bidi not implemented yet
        //Bidi bidi;
        Font font;             // Font used to lay out and render the segment
        FontRenderContext frc; // Rendering hints (antialiasing, fractional metrics)
        char text[];           // Backing character array (shared, not copied)
        int start;             // First character index of the segment (inclusive)
        int end;               // Last character index of the segment (exclusive)
        int length;            // Cached end - start
        int flags = 0;         // Font.layoutGlyphVector layout flags
        byte level = 0;        // Bidi embedding level; odd value means right-to-left

        TextSegmentInfo(
                byte level,
                Font font, FontRenderContext frc,
                char text[], int start, int end
        ) {
            this.font = font;
            this.frc = frc;
            this.text = text;
            this.start = start;
            this.end = end;
            this.level = level;
            length = end - start;
        }
    }
/**
* This class represents a simple text segment backed by the glyph vector
*/
public static class TextRunSegmentCommon extends TextRunSegment {
        TextSegmentInfo info;                    // Immutable description of the text span
        private GlyphVector gv;                  // Lazily-created glyph vector (see getGlyphVector)
        private float advanceIncrements[];       // Per-character advances, built lazily
        private int char2glyph[];                // Character index -> glyph index map, built lazily
        private GlyphJustificationInfo gjis[];   // Glyph justification info, built lazily
        /**
         * Creates a segment over the span described by {@code i}, decorated
         * by {@code d} (may be null for no decoration). Normalizes the layout
         * flags from the bidi embedding level and caches the font's line
         * metrics as this segment's metrics.
         */
        TextRunSegmentCommon(TextSegmentInfo i, TextDecorator.Decoration d) {
            // XXX - todo - check support bidi
            i.flags &= ~0x09; // Clear bidi flags (left-to-right / right-to-left bits)

            if ((i.level & 0x1) != 0) {
                // Odd embedding level: the run is laid out right-to-left.
                i.flags |= Font.LAYOUT_RIGHT_TO_LEFT;
            }

            info = i;
            this.decoration = d;

            LineMetrics lm = i.font.getLineMetrics(i.text, i.start, i.end, i.frc);
            this.metrics = new BasicMetrics(lm, i.font);

            if (lm.getNumChars() != i.length) { // XXX todo - This should be handled
                // awt.41=Font returned unsupported type of line metrics. This case is known, but not supported yet.
                throw new UnsupportedOperationException(
                        Messages.getString("awt.41")); //$NON-NLS-1$
            }
        }
        @Override
        public Object clone() {
            // Shallow copy: shares the segment info and decoration; the
            // cached glyph vector and mapping tables are rebuilt lazily
            // in the copy.
            return new TextRunSegmentCommon(info, decoration);
        }
        /**
         * Creates the glyph vector from the managed text if needed and
         * caches it for subsequent calls.
         * @return glyph vector
         */
        private GlyphVector getGlyphVector() {
            if (gv==null) {
                gv = info.font.layoutGlyphVector(
                        info.frc,
                        info.text,
                        info.start,
                        info.end - info.start, // NOTE: This parameter violates
                                               // spec, it is count,
                                               // not limit as spec states
                        info.flags
                );
            }
            return gv;
        }
        /**
         * Renders this text run segment at its (x, y) position, applying
         * decorations (underline, strikethrough, background) when present.
         * @param g2d - graphics to render to
         * @param xOffset - X offset from the graphics origin to the
         * origin of the text layout
         * @param yOffset - Y offset from the graphics origin to the
         * origin of the text layout
         */
        @Override
        void draw(Graphics2D g2d, float xOffset, float yOffset) {
            if (decoration == null) {
                g2d.drawGlyphVector(getGlyphVector(), xOffset + x, yOffset + y);
            } else {
                // Decorated path: set up paint/stroke, draw the glyphs, add
                // the decoration geometry, then restore the graphics state.
                TextDecorator.prepareGraphics(this, g2d, xOffset, yOffset);
                g2d.drawGlyphVector(getGlyphVector(), xOffset + x, yOffset + y);
                TextDecorator.drawTextDecorations(this, g2d, xOffset, yOffset);
                TextDecorator.restoreGraphics(decoration, g2d);
            }
        }
        /**
         * Returns visual bounds of this segment, translated to the segment's
         * (x, y) position. The result is computed once and cached; a clone is
         * returned so callers cannot mutate the cache.
         * @return visual bounds
         */
        @Override
        Rectangle2D getVisualBounds() {
            if (visualBounds == null) {
                // Let the decorator extend the raw glyph bounds with
                // underline/strikethrough extents when present.
                visualBounds =
                        TextDecorator.extendVisualBounds(
                                this,
                                getGlyphVector().getVisualBounds(),
                                decoration
                        );

                visualBounds.setRect(
                        x + visualBounds.getX(),
                        y + visualBounds.getY(),
                        visualBounds.getWidth(),
                        visualBounds.getHeight()
                );
            }

            return (Rectangle2D) visualBounds.clone();
        }
        /**
         * Returns logical bounds of this segment, translated to the segment's
         * (x, y) position. Computed once and cached; a clone is returned so
         * callers cannot mutate the cache.
         * @return logical bounds
         */
        @Override
        Rectangle2D getLogicalBounds() {
            if (logicalBounds == null) {
                logicalBounds = getGlyphVector().getLogicalBounds();

                logicalBounds.setRect(
                        x + logicalBounds.getX(),
                        y + logicalBounds.getY(),
                        logicalBounds.getWidth(),
                        logicalBounds.getHeight()
                );
            }

            return (Rectangle2D) logicalBounds.clone();
        }
        /**
         * Returns the total advance (logical width) of this segment.
         */
        @Override
        float getAdvance() {
            return (float) getLogicalBounds().getWidth();
        }
/**
* Attempts to map each character to the corresponding advance increment
*/
void initAdvanceMapping() {
GlyphVector gv = getGlyphVector();
int charIndicies[] = gv.getGlyphCharIndices(0, gv.getNumGlyphs(), null);
advanceIncrements = new float[info.length];
for (int i=0; i<charIndicies.length; i++) {
advanceIncrements[charIndicies[i]] = gv.getGlyphMetrics(i).getAdvance();
}
}
/**
* Calculates advance delta between two characters
* @param start - 1st position
* @param end - 2nd position
* @return advance increment between specified positions
*/
@Override
float getAdvanceDelta(int start, int end) {
// Get coordinates in the segment context
start -= info.start;
end -= info.start;
if (advanceIncrements == null) {
initAdvanceMapping();
}
if (start < 0) {
start = 0;
}
if (end > info.length) {
end = info.length;
}
float sum = 0;
for (int i=start; i<end; i++) {
sum += advanceIncrements[i];
}
return sum;
}
        /**
         * Calculates index of the character whose advance is equal to
         * the given one. If the given advance is greater than the segment
         * advance it returns the position after the last character.
         * @param advance - given advance
         * @param start - character, from which to start measuring advance
         * @return character index
         */
        @Override
        int getCharIndexFromAdvance(float advance, int start) {
            // XXX - todo - probably, possible to optimize
            // Add check if the given advance is greater than
            // the segment advance in the beginning. In this case
            // we don't need to run through all increments
            if (advanceIncrements == null) {
                initAdvanceMapping();
            }

            // Work in segment-local offsets; clamp a too-small start to 0.
            start -= info.start;

            if (start < 0) {
                start = 0;
            }

            // Walk the per-character advances until the budget is exhausted.
            int i = start;
            for (; i<info.length; i++) {
                advance -= advanceIncrements[i];
                if (advance < 0) {
                    break;
                }
            }

            return i + info.start;
        }
        /** Returns the first (inclusive) character index of this segment. */
        @Override
        int getStart() {
            return info.start;
        }

        /** Returns the last (exclusive) character index of this segment. */
        @Override
        int getEnd() {
            return info.end;
        }

        /** Returns the number of characters in this segment. */
        @Override
        int getLength() {
            return info.length;
        }
        /**
         * Attempts to create a mapping from characters to glyphs in the
         * glyph vector. Computed once and cached.
         * @return array where each character index maps to the corresponding
         * glyph index
         */
        private int[] getChar2Glyph() {
            if (char2glyph == null) {
                GlyphVector gv = getGlyphVector();
                char2glyph = new int[info.length];
                Arrays.fill(char2glyph, -1);

                // Fill glyph indices for the first character corresponding to each glyph
                int charIndicies[] = gv.getGlyphCharIndices(0, gv.getNumGlyphs(), null);
                for (int i=0; i<charIndicies.length; i++) {
                    char2glyph[charIndicies[i]] = i;
                }

                // If several characters correspond to one glyph, create the mapping
                // for them too. Assumes such characters are contiguous.
                int currIndex = 0;
                for (int i=0; i<char2glyph.length; i++) {
                    if (char2glyph[i] < 0) {
                        char2glyph[i] = currIndex;
                    } else {
                        currIndex = char2glyph[i];
                    }
                }
            }

            return char2glyph;
        }
/**
* Creates black box bounds shape for the specified range
* @param start - range sart
* @param limit - range end
* @return black box bounds shape
*/
@Override
Shape getCharsBlackBoxBounds(int start, int limit) {
start -= info.start;
limit -= info.start;
if (limit > info.length) {
limit = info.length;
}
GeneralPath result = new GeneralPath();
int glyphIndex = 0;
for (int i=start; i<limit; i++) {
glyphIndex = getChar2Glyph()[i];
result.append(getGlyphVector().getGlyphVisualBounds(glyphIndex), false);
}
// Shift to the segment's coordinates
result.transform(AffineTransform.getTranslateInstance(x, y));
return result;
}
        /**
         * Calculates the position of the character on the screen.
         * An index past the end of the segment is clamped to the last
         * character.
         * @param index - character index (overall text index)
         * @return X coordinate of the character position
         */
        @Override
        float getCharPosition(int index) {
            index -= info.start;

            if (index > info.length) {
                index = info.length;
            }

            float result = 0;

            int glyphIndex = getChar2Glyph()[index];
            result = (float) getGlyphVector().getGlyphPosition(glyphIndex).getX();

            // Shift to the segment's coordinates
            result += x;

            return result;
        }
        /**
         * Returns the advance of the individual character.
         * @param index - character index (overall text index)
         * @return character advance
         */
        @Override
        float getCharAdvance(int index) {
            if (advanceIncrements == null) {
                initAdvanceMapping(); // Lazily build the per-character advance table
            }

            return advanceIncrements[index - this.getStart()];
        }
        /**
         * Returns the outline shape of the segment's glyphs, extended with
         * the decoration geometry (if any) and translated to the segment's
         * (x, y) position.
         * @return outline
         */
        @Override
        Shape getOutline() {
            AffineTransform t = AffineTransform.getTranslateInstance(x, y);

            return t.createTransformedShape(
                    TextDecorator.extendOutline(
                            this,
                            getGlyphVector().getOutline(),
                            decoration
                    )
            );
        }
/**
* Checks if the character doesn't contribute to the text advance
* @param index - character index
* @return true if the character has zero advance
*/
@Override
boolean charHasZeroAdvance(int index) {
if (advanceIncrements == null) {
initAdvanceMapping();
}
return advanceIncrements[index - this.getStart()] == 0;
}
/**
 * Creates text hit info from the hit position.
 * Scans glyph X positions left to right to find the first glyph boundary
 * at or past hitX; the hit is "leading" when hitX falls in the left half
 * of that glyph's advance. The leading/trailing sense is then flipped for
 * odd (right-to-left) bidi levels via the XOR with (info.level & 0x1).
 * Note: hitY is accepted for interface symmetry but not used here.
 * @param hitX - X coordinate relative to the origin of the layout
 * @param hitY - Y coordinate relative to the origin of the layout
 * @return hit info
 */
@Override
TextHitInfo hitTest(float hitX, float hitY) {
    hitX -= x; // rebase to this segment's coordinates
    // Positions array holds (x, y) pairs for glyphs 0..info.length inclusive
    float glyphPositions[] =
            getGlyphVector().getGlyphPositions(0, info.length+1, null);
    int glyphIdx;
    boolean leading = false;
    for (glyphIdx = 1; glyphIdx <= info.length; glyphIdx++) {
        if (glyphPositions[(glyphIdx)*2] >= hitX) {
            float advance =
                    glyphPositions[(glyphIdx)*2] - glyphPositions[(glyphIdx-1)*2];
            // Leading if the hit lies in the left half of the glyph cell
            leading = glyphPositions[(glyphIdx-1)*2] + advance/2 > hitX ? true : false;
            glyphIdx--; // step back to the glyph that contains the hit
            break;
        }
    }
    // NOTE(review): if the loop falls through without breaking (hitX beyond
    // the last position), glyphIdx is info.length+1 and this check does not
    // clamp it -- presumably callers only pass in-range hitX; verify.
    if (glyphIdx == info.length) {
        glyphIdx--;
    }
    int charIdx = getGlyphVector().getGlyphCharIndex(glyphIdx);
    // XOR flips leading/trailing for RTL (odd bidi level) segments
    return (leading) ^ ((info.level & 0x1) == 0x1)?
            TextHitInfo.leading(charIdx + info.start) :
            TextHitInfo.trailing(charIdx + info.start);
}
/**
 * Collects GlyphJustificationInfo objects for every glyph of this
 * segment, caching the result in {@code gjis}.
 * GlyphVector.getGlyphJustificationInfo is not implemented yet, so as a
 * temporary patch whitespace glyphs get a stretchable/shrinkable info
 * (weight = font size, WHITESPACE priority) and every other glyph gets a
 * rigid zero-weight info.
 * @return array of all GlyphJustificationInfo objects
 */
private GlyphJustificationInfo[] getGlyphJustificationInfos() {
    if (gjis != null) {
        return gjis;
    }
    GlyphVector gv = getGlyphVector();
    int glyphCount = gv.getNumGlyphs();
    int[] charIndices = gv.getGlyphCharIndices(0, glyphCount, null);
    gjis = new GlyphJustificationInfo[glyphCount];
    float fontSize = info.font.getSize2D();
    // Rigid glyph: cannot grow or shrink, absorbs nothing
    GlyphJustificationInfo rigid =
            new GlyphJustificationInfo(
                    0, // weight
                    false, GlyphJustificationInfo.PRIORITY_NONE, 0, 0, // grow
                    false, GlyphJustificationInfo.PRIORITY_NONE, 0, 0); // shrink
    // Whitespace glyph: may grow or shrink by up to the font size
    GlyphJustificationInfo whitespace = new GlyphJustificationInfo(
            fontSize, // weight
            true, GlyphJustificationInfo.PRIORITY_WHITESPACE, 0, fontSize, // grow
            true, GlyphJustificationInfo.PRIORITY_WHITESPACE, 0, fontSize); // shrink
    for (int i = 0; i < glyphCount; i++) {
        char ch = info.text[charIndices[i] + info.start];
        gjis[i] = Character.isWhitespace(ch) ? whitespace : rigid;
    }
    return gjis;
}
/**
 * Accumulates this segment's justification capacity into the shared
 * JustificationInfo accumulator. Depending on jInfo.grow it sums either
 * the grow or the shrink weights/limits of all glyphs whose priority
 * matches the current pass, then subtracts the outer-edge contributions
 * of the very first and very last characters of the justified range
 * (no padding is added outside the range).
 * @param jInfo - JustificationInfo accumulator, updated in place
 */
@Override
void updateJustificationInfo(TextRunBreaker.JustificationInfo jInfo) {
    // Last character of this segment that falls inside the justified range
    int lastChar = Math.min(jInfo.lastIdx, info.end) - info.start;
    // Does this segment contain the first/last character of the range?
    boolean haveFirst = info.start <= jInfo.firstIdx;
    boolean haveLast = info.end >= (jInfo.lastIdx + 1);
    int prevGlyphIdx = -1;
    int currGlyphIdx;
    if (jInfo.grow) { // Check how much we can grow/shrink on current priority level
        for (int i=0; i<lastChar; i++) {
            currGlyphIdx = getChar2Glyph()[i];
            if (currGlyphIdx == prevGlyphIdx) {
                // Several chars could be represented by one glyph,
                // suppose they are contiguous
                continue;
            }
            prevGlyphIdx = currGlyphIdx;
            GlyphJustificationInfo gji = getGlyphJustificationInfos()[currGlyphIdx];
            if (gji.growPriority == jInfo.priority) {
                // Weight counted twice: once per glyph side (left + right)
                jInfo.weight += gji.weight * 2;
                jInfo.growLimit += gji.growLeftLimit;
                jInfo.growLimit += gji.growRightLimit;
                if (gji.growAbsorb) {
                    jInfo.absorbedWeight += gji.weight * 2;
                }
            }
        }
    } else {
        for (int i=0; i<lastChar; i++) {
            currGlyphIdx = getChar2Glyph()[i];
            if (currGlyphIdx == prevGlyphIdx) {
                continue;
            }
            prevGlyphIdx = currGlyphIdx;
            GlyphJustificationInfo gji = getGlyphJustificationInfos()[currGlyphIdx];
            if (gji.shrinkPriority == jInfo.priority) {
                jInfo.weight += gji.weight * 2;
                // Shrinking reduces the (signed) growth limit
                jInfo.growLimit -= gji.shrinkLeftLimit;
                jInfo.growLimit -= gji.shrinkRightLimit;
                if (gji.shrinkAbsorb) {
                    jInfo.absorbedWeight += gji.weight * 2;
                }
            }
        }
    }
    if (haveFirst) { // Don't add padding before first char
        GlyphJustificationInfo gji = getGlyphJustificationInfos()[getChar2Glyph()[0]];
        jInfo.weight -= gji.weight;
        if (jInfo.grow) {
            jInfo.growLimit -= gji.growLeftLimit;
            if (gji.growAbsorb) {
                jInfo.absorbedWeight -= gji.weight;
            }
        } else {
            jInfo.growLimit += gji.shrinkLeftLimit;
            if (gji.shrinkAbsorb) {
                jInfo.absorbedWeight -= gji.weight;
            }
        }
    }
    if (haveLast) { // Don't add padding after last char
        GlyphJustificationInfo gji =
                getGlyphJustificationInfos()[getChar2Glyph()[lastChar]];
        jInfo.weight -= gji.weight;
        if (jInfo.grow) {
            jInfo.growLimit -= gji.growRightLimit;
            if (gji.growAbsorb) {
                jInfo.absorbedWeight -= gji.weight;
            }
        } else {
            jInfo.growLimit += gji.shrinkRightLimit;
            if (gji.shrinkAbsorb) {
                jInfo.absorbedWeight -= gji.weight;
            }
        }
    }
}
/**
 * Performs justification of the segment.
 * Walks the glyphs left to right, accumulating a running offset
 * (glyphOffset) out of per-glyph grow/shrink limits and distributed gap
 * weights, and shifts each glyph position by the offset accumulated so
 * far. The first and last characters of the justified range get no
 * outer-edge padding.
 * @param jInfos - justification information, indexed by priority level;
 *        entries may be null for levels that do not participate
 * @return amount of growth or shrink of the segment
 */
@Override
float doJustification(TextRunBreaker.JustificationInfo jInfos[]) {
    // Priority of the last (absorbing) pass, or -1 if there is none
    int lastPriority =
            jInfos[jInfos.length-1] == null ?
                    -1 : jInfos[jInfos.length-1].priority;
    // Get the highest priority
    int highestPriority = 0;
    for (; highestPriority<jInfos.length; highestPriority++) {
        if (jInfos[highestPriority] != null) {
            break;
        }
    }
    if (highestPriority == jInfos.length) {
        return 0; // no priority level has any justification work to do
    }
    TextRunBreaker.JustificationInfo firstInfo = jInfos[highestPriority];
    TextRunBreaker.JustificationInfo lastInfo =
            lastPriority > 0 ? jInfos[lastPriority] : null;
    // Does this segment contain the first/last character of the range?
    boolean haveFirst = info.start <= firstInfo.firstIdx;
    boolean haveLast = info.end >= (firstInfo.lastIdx + 1);
    // Here we suppose that GLYPHS are ordered LEFT TO RIGHT
    int firstGlyph = haveFirst ?
            getChar2Glyph()[firstInfo.firstIdx - info.start] :
            getChar2Glyph()[0];
    int lastGlyph = haveLast ?
            getChar2Glyph()[firstInfo.lastIdx - info.start] :
            getChar2Glyph()[info.length - 1];
    if (haveLast) {
        lastGlyph--;
    }
    TextRunBreaker.JustificationInfo currInfo;
    float glyphOffset = 0;       // running shift applied to glyph positions
    float positionIncrement = 0; // shift applied to the current glyph
    float sideIncrement = 0;     // half of a glyph's distributed gap
    if (haveFirst) { // Don't add padding before first char
        GlyphJustificationInfo gji = getGlyphJustificationInfos()[firstGlyph];
        currInfo = jInfos[gji.growPriority];
        if (currInfo != null) {
            if (currInfo.useLimits) {
                if (currInfo.absorb) {
                    glyphOffset += gji.weight * currInfo.absorbedGapPerUnit;
                } else if (
                        lastInfo != null &&
                        lastInfo.priority == currInfo.priority
                ) {
                    glyphOffset += gji.weight * lastInfo.absorbedGapPerUnit;
                }
                // Only the right-side limit counts: left edge gets no padding
                glyphOffset +=
                        firstInfo.grow ?
                                gji.growRightLimit :
                                -gji.shrinkRightLimit;
            } else {
                glyphOffset += gji.weight * currInfo.gapPerUnit;
            }
        }
        firstGlyph++;
    }
    if (firstInfo.grow) {
        for (int i=firstGlyph; i<=lastGlyph; i++) {
            GlyphJustificationInfo gji = getGlyphJustificationInfos()[i];
            currInfo = jInfos[gji.growPriority];
            if (currInfo == null) {
                // We still have to increment glyph position
                Point2D glyphPos = getGlyphVector().getGlyphPosition(i);
                glyphPos.setLocation(glyphPos.getX() + glyphOffset, glyphPos.getY());
                getGlyphVector().setGlyphPosition(i, glyphPos);
                continue;
            }
            if (currInfo.useLimits) {
                glyphOffset += gji.growLeftLimit;
                if (currInfo.absorb) {
                    // Half of the absorbed gap lands on each glyph side
                    sideIncrement = gji.weight * currInfo.absorbedGapPerUnit;
                    glyphOffset += sideIncrement;
                    positionIncrement = glyphOffset;
                    glyphOffset += sideIncrement;
                } else if (lastInfo != null && lastInfo.priority == currInfo.priority) {
                    sideIncrement = gji.weight * lastInfo.absorbedGapPerUnit;
                    glyphOffset += sideIncrement;
                    positionIncrement = glyphOffset;
                    glyphOffset += sideIncrement;
                } else {
                    positionIncrement = glyphOffset;
                }
                glyphOffset += gji.growRightLimit;
            } else {
                sideIncrement = gji.weight * currInfo.gapPerUnit;
                glyphOffset += sideIncrement;
                positionIncrement = glyphOffset;
                glyphOffset += sideIncrement;
            }
            Point2D glyphPos = getGlyphVector().getGlyphPosition(i);
            glyphPos.setLocation(glyphPos.getX() + positionIncrement, glyphPos.getY());
            getGlyphVector().setGlyphPosition(i, glyphPos);
        }
    } else {
        // Shrink pass: limits are subtracted, gap-per-unit values are negative
        for (int i=firstGlyph; i<=lastGlyph; i++) {
            GlyphJustificationInfo gji = getGlyphJustificationInfos()[i];
            currInfo = jInfos[gji.shrinkPriority];
            if (currInfo == null) {
                // We still have to increment glyph position
                Point2D glyphPos = getGlyphVector().getGlyphPosition(i);
                glyphPos.setLocation(glyphPos.getX() + glyphOffset, glyphPos.getY());
                getGlyphVector().setGlyphPosition(i, glyphPos);
                continue;
            }
            if (currInfo.useLimits) {
                glyphOffset -= gji.shrinkLeftLimit;
                if (currInfo.absorb) {
                    sideIncrement = gji.weight * currInfo.absorbedGapPerUnit;
                    glyphOffset += sideIncrement;
                    positionIncrement = glyphOffset;
                    glyphOffset += sideIncrement;
                } else if (lastInfo != null && lastInfo.priority == currInfo.priority) {
                    sideIncrement = gji.weight * lastInfo.absorbedGapPerUnit;
                    glyphOffset += sideIncrement;
                    positionIncrement = glyphOffset;
                    glyphOffset += sideIncrement;
                } else {
                    positionIncrement = glyphOffset;
                }
                glyphOffset -= gji.shrinkRightLimit;
            } else {
                sideIncrement = gji.weight * currInfo.gapPerUnit;
                glyphOffset += sideIncrement;
                positionIncrement = glyphOffset;
                glyphOffset += sideIncrement;
            }
            Point2D glyphPos = getGlyphVector().getGlyphPosition(i);
            glyphPos.setLocation(glyphPos.getX() + positionIncrement, glyphPos.getY());
            getGlyphVector().setGlyphPosition(i, glyphPos);
        }
    }
    if (haveLast) { // Don't add padding after last char
        lastGlyph++;
        GlyphJustificationInfo gji = getGlyphJustificationInfos()[lastGlyph];
        currInfo = jInfos[gji.growPriority];
        if (currInfo != null) {
            if (currInfo.useLimits) {
                // Only the left-side limit counts: right edge gets no padding
                glyphOffset += firstInfo.grow ? gji.growLeftLimit : -gji.shrinkLeftLimit;
                if (currInfo.absorb) {
                    glyphOffset += gji.weight * currInfo.absorbedGapPerUnit;
                } else if (lastInfo != null && lastInfo.priority == currInfo.priority) {
                    glyphOffset += gji.weight * lastInfo.absorbedGapPerUnit;
                }
            } else {
                glyphOffset += gji.weight * currInfo.gapPerUnit;
            }
        }
        // Adjust positions of all glyphs after last glyph
        // (the positions array has one extra trailing entry for the advance)
        for (int i=lastGlyph; i<getGlyphVector().getNumGlyphs()+1; i++) {
            Point2D glyphPos = getGlyphVector().getGlyphPosition(i);
            glyphPos.setLocation(glyphPos.getX() + glyphOffset, glyphPos.getY());
            getGlyphVector().setGlyphPosition(i, glyphPos);
        }
    } else { // Update position after last glyph in glyph vector -
             // to get correct advance for it
        Point2D glyphPos = getGlyphVector().getGlyphPosition(lastGlyph+1);
        glyphPos.setLocation(glyphPos.getX() + glyphOffset, glyphPos.getY());
        getGlyphVector().setGlyphPosition(lastGlyph+1, glyphPos);
    }
    gjis = null; // We don't need justification infos any more
    // Also we have to reset cached bounds and metrics
    this.visualBounds = null;
    this.logicalBounds = null;
    return glyphOffset; // How much our segment grew or shrunk
}
}
/**
 * Text run segment backed by a {@link GraphicAttribute} (an embedded
 * graphic inlined into the text) instead of glyphs. Every "character"
 * in the segment is rendered as one copy of the graphic and shares the
 * same fixed advance, taken from the attribute.
 */
public static class TextRunSegmentGraphic extends TextRunSegment {
    GraphicAttribute ga; // the graphic drawn once per character
    int start;           // first character index covered by this segment
    int length;          // number of characters in this segment
    float fullAdvance;   // total advance: per-char advance * length

    /**
     * @param attr - graphic attribute rendered for each character
     * @param len - number of characters
     * @param start - first character index
     */
    TextRunSegmentGraphic(GraphicAttribute attr, int len, int start) {
        this.start = start;
        length = len;
        ga = attr;
        metrics = new BasicMetrics(ga);
        fullAdvance = ga.getAdvance() * length;
    }

    @Override
    public Object clone() {
        // Position/decoration state is not copied; only the defining fields
        return new TextRunSegmentGraphic(ga, length, start);
    }

    // Renders this text run segment: one copy of the graphic per character
    @Override
    void draw(Graphics2D g2d, float xOffset, float yOffset) {
        if (decoration != null) {
            TextDecorator.prepareGraphics(this, g2d, xOffset, yOffset);
        }
        float xPos = x + xOffset;
        float yPos = y + yOffset;
        for (int i=0; i < length; i++) {
            ga.draw(g2d, xPos, yPos);
            xPos += ga.getAdvance();
        }
        if (decoration != null) {
            TextDecorator.drawTextDecorations(this, g2d, xOffset, yOffset);
            TextDecorator.restoreGraphics(decoration, g2d);
        }
    }

    // Returns visual bounds of this segment (cached after first call)
    @Override
    Rectangle2D getVisualBounds() {
        if (visualBounds == null) {
            Rectangle2D bounds = ga.getBounds();
            // First and last chars can be out of logical bounds, so we calculate
            // (bounds.getWidth() - ga.getAdvance()) which is exactly the difference
            bounds.setRect(
                    bounds.getMinX() + x,
                    bounds.getMinY() + y,
                    bounds.getWidth() - ga.getAdvance() + getAdvance(),
                    bounds.getHeight()
            );
            visualBounds = TextDecorator.extendVisualBounds(this, bounds, decoration);
        }
        return (Rectangle2D) visualBounds.clone();
    }

    // Logical bounds: full advance wide, ascent+descent tall (cached)
    @Override
    Rectangle2D getLogicalBounds() {
        if (logicalBounds == null) {
            logicalBounds =
                    new Rectangle2D.Float(
                            x, y - metrics.ascent,
                            getAdvance(), metrics.ascent + metrics.descent
                    );
        }
        return (Rectangle2D) logicalBounds.clone();
    }

    @Override
    float getAdvance() {
        return fullAdvance;
    }

    // Advance of the half-open character range [start, end)
    @Override
    float getAdvanceDelta(int start, int end) {
        return ga.getAdvance() * (end - start);
    }

    /**
     * Finds the character whose position is reached after consuming
     * {@code advance} starting from absolute index {@code start};
     * clamped to the end of the segment.
     */
    @Override
    int getCharIndexFromAdvance(float advance, int start) {
        start -= this.start; // rebase to segment-local index
        if (start < 0) {
            start = 0;
        }
        int charOffset = (int) (advance/ga.getAdvance());
        if (charOffset + start > length) {
            return length + this.start;
        }
        return charOffset + start + this.start;
    }

    @Override
    int getStart() {
        return start;
    }

    @Override
    int getEnd() {
        return start + length;
    }

    @Override
    int getLength() {
        return length;
    }

    // Black-box bounds of characters in [start, limit), segment coordinates
    @Override
    Shape getCharsBlackBoxBounds(int start, int limit) {
        start -= this.start;
        limit -= this.start;
        if (limit > length) {
            limit = length;
        }
        Rectangle2D charBounds = ga.getBounds();
        // NOTE(review): width keeps the graphic's full bounds width plus the
        // covered advances -- presumably intentional overshoot for ink that
        // extends past the advance; verify against glyph-segment behavior.
        charBounds.setRect(
                charBounds.getX() + ga.getAdvance() * start + x,
                charBounds.getY() + y,
                charBounds.getWidth() + ga.getAdvance() * (limit - start),
                charBounds.getHeight()
        );
        return charBounds;
    }

    // X position of the character, clamped to the end of the segment
    @Override
    float getCharPosition(int index) {
        index -= start;
        if (index > length) {
            index = length;
        }
        return ga.getAdvance() * index + x;
    }

    // Every character has the same fixed advance
    @Override
    float getCharAdvance(int index) {
        return ga.getAdvance();
    }

    // Outline is the (decorated) visual bounds translated to (x, y)
    @Override
    Shape getOutline() {
        AffineTransform t = AffineTransform.getTranslateInstance(x, y);
        return t.createTransformedShape(
                TextDecorator.extendOutline(this, getVisualBounds(), decoration)
        );
    }

    // Graphic characters always advance, so never zero
    @Override
    boolean charHasZeroAdvance(int index) {
        return false;
    }

    // Hit test against the uniform character grid; hitY is not used
    @Override
    TextHitInfo hitTest(float hitX, float hitY) {
        hitX -= x;
        float tmp = hitX / ga.getAdvance();
        // Round to the nearest boundary, then decide leading/trailing by
        // which side of that boundary the hit actually fell on
        int hitIndex = Math.round(tmp);
        if (tmp > hitIndex) {
            return TextHitInfo.leading(hitIndex + this.start);
        }
        return TextHitInfo.trailing(hitIndex + this.start);
    }

    // Graphic segments don't participate in justification
    @Override
    void updateJustificationInfo(TextRunBreaker.JustificationInfo jInfo) {
        // Do nothing
    }

    // Graphic segments don't grow or shrink during justification
    @Override
    float doJustification(TextRunBreaker.JustificationInfo jInfos[]) {
        // Do nothing
        return 0;
    }
}
}
|
apache/parquet-java | 35,168 | parquet-arrow/src/main/java/org/apache/parquet/arrow/schema/SchemaConverter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.parquet.arrow.schema;
import static java.util.Arrays.asList;
import static java.util.Optional.empty;
import static java.util.Optional.of;
import static org.apache.parquet.schema.LogicalTypeAnnotation.TimeUnit.MICROS;
import static org.apache.parquet.schema.LogicalTypeAnnotation.TimeUnit.MILLIS;
import static org.apache.parquet.schema.LogicalTypeAnnotation.TimeUnit.NANOS;
import static org.apache.parquet.schema.LogicalTypeAnnotation.dateType;
import static org.apache.parquet.schema.LogicalTypeAnnotation.decimalType;
import static org.apache.parquet.schema.LogicalTypeAnnotation.intType;
import static org.apache.parquet.schema.LogicalTypeAnnotation.stringType;
import static org.apache.parquet.schema.LogicalTypeAnnotation.timeType;
import static org.apache.parquet.schema.LogicalTypeAnnotation.timestampType;
import static org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.BINARY;
import static org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.BOOLEAN;
import static org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.DOUBLE;
import static org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.FIXED_LEN_BYTE_ARRAY;
import static org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.FLOAT;
import static org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.INT32;
import static org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.INT64;
import static org.apache.parquet.schema.Type.Repetition.OPTIONAL;
import static org.apache.parquet.schema.Type.Repetition.REPEATED;
import static org.apache.parquet.schema.Type.Repetition.REQUIRED;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import org.apache.arrow.vector.types.DateUnit;
import org.apache.arrow.vector.types.FloatingPointPrecision;
import org.apache.arrow.vector.types.TimeUnit;
import org.apache.arrow.vector.types.pojo.ArrowType;
import org.apache.arrow.vector.types.pojo.ArrowType.ArrowTypeVisitor;
import org.apache.arrow.vector.types.pojo.ArrowType.Binary;
import org.apache.arrow.vector.types.pojo.ArrowType.Bool;
import org.apache.arrow.vector.types.pojo.ArrowType.Date;
import org.apache.arrow.vector.types.pojo.ArrowType.Decimal;
import org.apache.arrow.vector.types.pojo.ArrowType.FloatingPoint;
import org.apache.arrow.vector.types.pojo.ArrowType.Int;
import org.apache.arrow.vector.types.pojo.ArrowType.Interval;
import org.apache.arrow.vector.types.pojo.ArrowType.Null;
import org.apache.arrow.vector.types.pojo.ArrowType.Struct;
import org.apache.arrow.vector.types.pojo.ArrowType.Time;
import org.apache.arrow.vector.types.pojo.ArrowType.Timestamp;
import org.apache.arrow.vector.types.pojo.ArrowType.Union;
import org.apache.arrow.vector.types.pojo.ArrowType.Utf8;
import org.apache.arrow.vector.types.pojo.Field;
import org.apache.arrow.vector.types.pojo.FieldType;
import org.apache.arrow.vector.types.pojo.Schema;
import org.apache.parquet.arrow.schema.SchemaMapping.ListTypeMapping;
import org.apache.parquet.arrow.schema.SchemaMapping.PrimitiveTypeMapping;
import org.apache.parquet.arrow.schema.SchemaMapping.RepeatedTypeMapping;
import org.apache.parquet.arrow.schema.SchemaMapping.StructTypeMapping;
import org.apache.parquet.arrow.schema.SchemaMapping.TypeMapping;
import org.apache.parquet.arrow.schema.SchemaMapping.UnionTypeMapping;
import org.apache.parquet.schema.GroupType;
import org.apache.parquet.schema.LogicalTypeAnnotation;
import org.apache.parquet.schema.MessageType;
import org.apache.parquet.schema.PrimitiveType;
import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName;
import org.apache.parquet.schema.Type;
import org.apache.parquet.schema.Type.Repetition;
import org.apache.parquet.schema.Types;
import org.apache.parquet.schema.Types.GroupBuilder;
/**
* Logic to convert Parquet and Arrow Schemas back and forth and maintain the mapping
*/
public class SchemaConverter {
// Indicates if INT96 values should be converted to an Arrow Timestamp
private final boolean convertInt96ToArrowTimestamp;
/**
 * Creates a converter with INT96-to-Arrow-Timestamp conversion disabled.
 * Kept as a separate no-arg constructor for when this needs to be
 * configurable.
 */
public SchemaConverter() {
    this(false);
}
// TODO(PARQUET-1511): pass the parameters in a configuration object
/**
 * @param convertInt96ToArrowTimestamp whether INT96 values should be
 *     converted to an Arrow Timestamp
 */
public SchemaConverter(final boolean convertInt96ToArrowTimestamp) {
    this.convertInt96ToArrowTimestamp = convertInt96ToArrowTimestamp;
}
/**
 * Creates a Parquet Schema from an Arrow one and returns the mapping.
 * The resulting message type is always named "root".
 *
 * @param arrowSchema the provided Arrow Schema
 * @return the mapping between the two schemas
 */
public SchemaMapping fromArrow(Schema arrowSchema) {
    List<TypeMapping> mappings = fromArrow(arrowSchema.getFields());
    MessageType message = addToBuilder(mappings, Types.buildMessage()).named("root");
    return new SchemaMapping(arrowSchema, message, mappings);
}
/** Appends the Parquet side of each mapping to the given group builder. */
private <T> GroupBuilder<T> addToBuilder(List<TypeMapping> parquetFields, GroupBuilder<T> builder) {
    GroupBuilder<T> current = builder;
    for (TypeMapping mapping : parquetFields) {
        current = current.addField(mapping.getParquetType());
    }
    return current;
}
/** Converts every Arrow field in order, preserving positions. */
private List<TypeMapping> fromArrow(List<Field> fields) {
    List<TypeMapping> mappings = new ArrayList<>(fields.size());
    fields.forEach(arrowField -> mappings.add(fromArrow(arrowField)));
    return mappings;
}
/** Converts a single Arrow field, keeping its own name. */
private TypeMapping fromArrow(final Field field) {
    return fromArrow(field, field.getName());
}
/**
 * Maps a single Arrow field to a Parquet type via an ArrowTypeVisitor.
 * Every produced Parquet field is OPTIONAL; nested children are converted
 * recursively.
 *
 * @param field arrow field
 * @param fieldName overrides field.getName()
 * @return mapping
 */
private TypeMapping fromArrow(final Field field, final String fieldName) {
    final List<Field> children = field.getChildren();
    return field.getType().accept(new ArrowTypeVisitor<TypeMapping>() {
        @Override
        public TypeMapping visit(Null type) {
            // TODO(PARQUET-757): null original type
            return primitive(BINARY);
        }

        @Override
        public TypeMapping visit(Struct type) {
            // Struct -> optional Parquet group with converted children
            List<TypeMapping> parquetTypes = fromArrow(children);
            return new StructTypeMapping(
                    field,
                    addToBuilder(parquetTypes, Types.buildGroup(OPTIONAL)).named(fieldName),
                    parquetTypes);
        }

        @Override
        public TypeMapping visit(org.apache.arrow.vector.types.pojo.ArrowType.List type) {
            return createListTypeMapping();
        }

        @Override
        public TypeMapping visit(ArrowType.LargeList largeList) {
            // LargeList mapped the same as List
            return createListTypeMapping();
        }

        @Override
        public TypeMapping visit(org.apache.arrow.vector.types.pojo.ArrowType.FixedSizeList type) {
            // NOTE(review): fixed size is not preserved in the Parquet type
            return createListTypeMapping();
        }

        @Override
        public TypeMapping visit(ArrowType.ListView type) {
            return createListTypeMapping();
        }

        // Shared conversion for all list-like Arrow types: a 3-level
        // Parquet LIST whose element is the single converted child
        private ListTypeMapping createListTypeMapping() {
            if (children.size() != 1) {
                throw new IllegalArgumentException("list fields must have exactly one child: " + field);
            }
            TypeMapping parquetChild = fromArrow(children.get(0), "element");
            GroupType list = Types.optionalList()
                    .element(parquetChild.getParquetType())
                    .named(fieldName);
            return new ListTypeMapping(field, new List3Levels(list), parquetChild);
        }

        @Override
        public TypeMapping visit(Union type) {
            // TODO(PARQUET-756): add Union OriginalType
            List<TypeMapping> parquetTypes = fromArrow(children);
            return new UnionTypeMapping(
                    field,
                    addToBuilder(parquetTypes, Types.buildGroup(OPTIONAL)).named(fieldName),
                    parquetTypes);
        }

        @Override
        public TypeMapping visit(ArrowType.Map map) {
            // Map -> 3-level Parquet MAP with "key"/"value" children
            if (children.size() != 2) {
                throw new IllegalArgumentException("Map fields must have exactly two children: " + field);
            }
            TypeMapping keyChild = fromArrow(children.get(0), "key");
            TypeMapping valueChild = fromArrow(children.get(1), "value");
            GroupType groupType = Types.optionalMap()
                    .key(keyChild.getParquetType())
                    .value(valueChild.getParquetType())
                    .named(fieldName);
            return new SchemaMapping.MapTypeMapping(field, new Map3Levels(groupType), keyChild, valueChild);
        }

        @Override
        public TypeMapping visit(Int type) {
            // 8/16/32-bit ints are stored as INT32 with an int annotation
            // carrying the true width and signedness; 64-bit as INT64
            boolean signed = type.getIsSigned();
            switch (type.getBitWidth()) {
                case 8:
                case 16:
                case 32:
                    return primitive(INT32, intType(type.getBitWidth(), signed));
                case 64:
                    return primitive(INT64, intType(64, signed));
                default:
                    throw new IllegalArgumentException("Illegal int type: " + field);
            }
        }

        @Override
        public TypeMapping visit(FloatingPoint type) {
            switch (type.getPrecision()) {
                case HALF:
                    // TODO(PARQUET-757): original type HalfFloat
                    // (half precision is widened to FLOAT for now)
                    return primitive(FLOAT);
                case SINGLE:
                    return primitive(FLOAT);
                case DOUBLE:
                    return primitive(DOUBLE);
                default:
                    throw new IllegalArgumentException("Illegal float type: " + field);
            }
        }

        @Override
        public TypeMapping visit(Utf8 type) {
            return primitive(BINARY, stringType());
        }

        @Override
        public TypeMapping visit(ArrowType.LargeUtf8 largeUtf8) {
            return primitive(BINARY, stringType());
        }

        @Override
        public TypeMapping visit(ArrowType.Utf8View type) {
            return primitive(BINARY, stringType());
        }

        @Override
        public TypeMapping visit(Binary type) {
            return primitive(BINARY);
        }

        @Override
        public TypeMapping visit(ArrowType.BinaryView type) {
            return primitive(BINARY);
        }

        @Override
        public TypeMapping visit(ArrowType.LargeBinary largeBinary) {
            return primitive(BINARY);
        }

        @Override
        public TypeMapping visit(Bool type) {
            return primitive(BOOLEAN);
        }

        /**
         * See https://github.com/apache/parquet-format/blob/master/LogicalTypes.md#decimal
         * Physical type is chosen by precision: INT32 up to 9 digits,
         * INT64 up to 18, BINARY beyond that.
         * @param type an arrow decimal type
         * @return a mapping from the arrow decimal to the Parquet type
         */
        @Override
        public TypeMapping visit(Decimal type) {
            int precision = type.getPrecision();
            int scale = type.getScale();
            if (1 <= precision && precision <= 9) {
                return decimal(INT32, precision, scale);
            } else if (1 <= precision && precision <= 18) {
                return decimal(INT64, precision, scale);
            } else {
                // Better: FIXED_LEN_BYTE_ARRAY with a length derived from precision
                return decimal(BINARY, precision, scale);
            }
        }

        @Override
        public TypeMapping visit(Date type) {
            // Days since epoch, regardless of the Arrow DateUnit
            return primitive(INT32, dateType());
        }

        @Override
        public TypeMapping visit(Time type) {
            // Parquet time annotations are written with isAdjustedToUTC=false
            int bitWidth = type.getBitWidth();
            TimeUnit timeUnit = type.getUnit();
            if (bitWidth == 32 && timeUnit == TimeUnit.MILLISECOND) {
                return primitive(INT32, timeType(false, MILLIS));
            } else if (bitWidth == 64 && timeUnit == TimeUnit.MICROSECOND) {
                return primitive(INT64, timeType(false, MICROS));
            } else if (bitWidth == 64 && timeUnit == TimeUnit.NANOSECOND) {
                return primitive(INT64, timeType(false, NANOS));
            }
            throw new UnsupportedOperationException("Unsupported type " + type);
        }

        @Override
        public TypeMapping visit(Timestamp type) {
            // Arrow SECOND unit is not supported (falls through to throw)
            TimeUnit timeUnit = type.getUnit();
            if (timeUnit == TimeUnit.MILLISECOND) {
                return primitive(INT64, timestampType(isUtcNormalized(type), MILLIS));
            } else if (timeUnit == TimeUnit.MICROSECOND) {
                return primitive(INT64, timestampType(isUtcNormalized(type), MICROS));
            } else if (timeUnit == TimeUnit.NANOSECOND) {
                return primitive(INT64, timestampType(isUtcNormalized(type), NANOS));
            }
            throw new UnsupportedOperationException("Unsupported type " + type);
        }

        // A timestamp with any timezone string is treated as UTC-normalized
        private boolean isUtcNormalized(Timestamp timestamp) {
            String timeZone = timestamp.getTimezone();
            return timeZone != null && !timeZone.isEmpty();
        }

        /**
         * See https://github.com/apache/parquet-format/blob/master/LogicalTypes.md#interval
         */
        @Override
        public TypeMapping visit(Interval type) {
            // TODO(PARQUET-675): fix interval original types
            return primitiveFLBA(12, LogicalTypeAnnotation.IntervalLogicalTypeAnnotation.getInstance());
        }

        @Override
        public TypeMapping visit(ArrowType.Duration duration) {
            // NOTE(review): Duration reuses the 12-byte INTERVAL encoding --
            // presumably a placeholder like Interval above; verify round-trip.
            return primitiveFLBA(12, LogicalTypeAnnotation.IntervalLogicalTypeAnnotation.getInstance());
        }

        @Override
        public TypeMapping visit(ArrowType.ExtensionType type) {
            // Delegates to the visitor's default handling for extension types
            return ArrowTypeVisitor.super.visit(type);
        }

        @Override
        public TypeMapping visit(ArrowType.FixedSizeBinary fixedSizeBinary) {
            // NOTE(review): the fixed byte width is dropped; BINARY is used
            // instead of FIXED_LEN_BYTE_ARRAY -- verify intended.
            return primitive(BINARY);
        }

        // Wraps a built Parquet primitive into a mapping for this field
        private TypeMapping mapping(PrimitiveType parquetType) {
            return new PrimitiveTypeMapping(field, parquetType);
        }

        // Optional primitive annotated as decimal(scale, precision)
        private TypeMapping decimal(PrimitiveTypeName type, int precision, int scale) {
            return mapping(
                    Types.optional(type).as(decimalType(scale, precision)).named(fieldName));
        }

        // Optional primitive with no logical annotation
        private TypeMapping primitive(PrimitiveTypeName type) {
            return mapping(Types.optional(type).named(fieldName));
        }

        // Optional primitive with a logical annotation
        private TypeMapping primitive(PrimitiveTypeName type, LogicalTypeAnnotation otype) {
            return mapping(Types.optional(type).as(otype).named(fieldName));
        }

        // Optional FIXED_LEN_BYTE_ARRAY of the given length and annotation
        private TypeMapping primitiveFLBA(int length, LogicalTypeAnnotation otype) {
            return mapping(Types.optional(FIXED_LEN_BYTE_ARRAY)
                    .length(length)
                    .as(otype)
                    .named(fieldName));
        }
    });
}
/**
 * Creates an Arrow Schema from a Parquet one and returns the mapping.
 *
 * @param parquetSchema the provided Parquet Schema
 * @return the mapping between the two schemas
 */
public SchemaMapping fromParquet(MessageType parquetSchema) {
    List<TypeMapping> mappings = fromParquet(parquetSchema.getFields());
    Schema arrowSchema = new Schema(fields(mappings));
    return new SchemaMapping(arrowSchema, parquetSchema, mappings);
}
/** Extracts the Arrow side of every mapping, preserving order. */
private List<Field> fields(List<TypeMapping> mappings) {
    List<Field> arrowFields = new ArrayList<>(mappings.size());
    mappings.forEach(mapping -> arrowFields.add(mapping.getArrowField()));
    return arrowFields;
}
/** Converts every Parquet field in order, preserving positions. */
private List<TypeMapping> fromParquet(List<Type> fields) {
    List<TypeMapping> mappings = new ArrayList<>(fields.size());
    for (Type parquetField : fields) {
        mappings.add(fromParquet(parquetField));
    }
    return mappings;
}
/** Converts a single Parquet field using its own name and repetition. */
private TypeMapping fromParquet(Type type) {
    return fromParquet(type, type.getName(), type.getRepetition());
}
/**
 * Converts one Parquet field to the Arrow side of a mapping.
 *
 * @param type parquet type
 * @param name overrides type.getName()
 * @param repetition overrides type.getRepetition()
 * @return a type mapping from the Parquet type to an Arrow type
 */
private TypeMapping fromParquet(Type type, String name, Repetition repetition) {
    if (repetition == REPEATED) {
        // A bare repeated field (not wrapped in a LIST/MAP annotation) becomes
        // a non-nullable Arrow list whose element is the field itself, required.
        TypeMapping element = fromParquet(type, null, REQUIRED);
        Field listField = new Field(
                name,
                FieldType.notNullable(new ArrowType.List()),
                Collections.singletonList(element.getArrowField()));
        return new RepeatedTypeMapping(listField, type, element);
    }
    return type.isPrimitive()
            ? fromParquetPrimitive(type.asPrimitiveType(), name)
            : fromParquetGroup(type.asGroupType(), name);
}
/**
 * Converts a Parquet group type (plain struct, LIST, or MAP) to its
 * Arrow counterpart.
 *
 * @param type parquet group type
 * @param name overrides type.getName()
 * @return the mapping
 */
private TypeMapping fromParquetGroup(GroupType type, String name) {
    LogicalTypeAnnotation logicalType = type.getLogicalTypeAnnotation();
    if (logicalType == null) {
        // Plain group -> Arrow struct; nullability follows the repetition
        final FieldType field;
        if (type.isRepetition(OPTIONAL)) {
            field = FieldType.nullable(new Struct());
        } else {
            field = FieldType.notNullable(new Struct());
        }
        List<TypeMapping> typeMappings = fromParquet(type.getFields());
        Field arrowField = new Field(name, field, fields(typeMappings));
        return new StructTypeMapping(arrowField, type, typeMappings);
    } else {
        return logicalType
                .accept(new LogicalTypeAnnotation.LogicalTypeAnnotationVisitor<TypeMapping>() {
                    @Override
                    public Optional<TypeMapping> visit(
                            LogicalTypeAnnotation.ListLogicalTypeAnnotation listLogicalType) {
                        // LIST annotation: unwrap the 3-level list structure
                        List3Levels list3Levels = new List3Levels(type);
                        TypeMapping child = fromParquet(
                                list3Levels.getElement(),
                                null,
                                list3Levels.getElement().getRepetition());
                        Field arrowField = new Field(
                                name,
                                FieldType.nullable(new ArrowType.List()),
                                Collections.singletonList(child.getArrowField()));
                        return of(new ListTypeMapping(arrowField, list3Levels, child));
                    }

                    @Override
                    public Optional<TypeMapping> visit(
                            LogicalTypeAnnotation.MapLogicalTypeAnnotation mapLogicalType) {
                        // MAP annotation: unwrap key/value of the 3-level map
                        Map3Levels map3levels = new Map3Levels(type);
                        TypeMapping keyType = fromParquet(
                                map3levels.getKey(),
                                null,
                                map3levels.getKey().getRepetition());
                        TypeMapping valueType = fromParquet(
                                map3levels.getValue(),
                                null,
                                map3levels.getValue().getRepetition());
                        Field arrowField = new Field(
                                name,
                                FieldType.nullable(new ArrowType.Map(false)),
                                asList(keyType.getArrowField(), valueType.getArrowField()));
                        return of(new SchemaMapping.MapTypeMapping(arrowField, map3levels, keyType, valueType));
                    }
                })
                // Any other annotation on a group type is not convertible
                .orElseThrow(() -> new UnsupportedOperationException("Unsupported type " + type));
    }
}
/**
* @param type parquet types
* @param name overrides parquet.getName()
* @return the mapping
*/
private TypeMapping fromParquetPrimitive(final PrimitiveType type, final String name) {
return type.getPrimitiveTypeName()
.convert(new PrimitiveType.PrimitiveTypeNameConverter<TypeMapping, RuntimeException>() {
// Wraps the given Arrow type in a Field, carrying the Parquet repetition
// (OPTIONAL vs required) over as Arrow nullability.
private TypeMapping field(ArrowType arrowType) {
final Field field;
if (type.isRepetition(OPTIONAL)) {
field = Field.nullable(name, arrowType);
} else {
field = Field.notNullable(name, arrowType);
}
return new PrimitiveTypeMapping(field, type);
}
@Override
public TypeMapping convertFLOAT(PrimitiveTypeName primitiveTypeName) throws RuntimeException {
return field(new ArrowType.FloatingPoint(FloatingPointPrecision.SINGLE));
}
@Override
public TypeMapping convertDOUBLE(PrimitiveTypeName primitiveTypeName) throws RuntimeException {
return field(new ArrowType.FloatingPoint(FloatingPointPrecision.DOUBLE));
}
// INT32 may carry a decimal, date, time(MILLIS) or narrower-int annotation;
// without an annotation it is a plain signed 32-bit integer. Annotations with
// no visit() override below yield empty() and fall through to the error.
@Override
public TypeMapping convertINT32(PrimitiveTypeName primitiveTypeName) throws RuntimeException {
LogicalTypeAnnotation logicalTypeAnnotation = type.getLogicalTypeAnnotation();
if (logicalTypeAnnotation == null) {
return integer(32, true);
}
return logicalTypeAnnotation
.accept(new LogicalTypeAnnotation.LogicalTypeAnnotationVisitor<TypeMapping>() {
@Override
public Optional<TypeMapping> visit(
LogicalTypeAnnotation.DecimalLogicalTypeAnnotation decimalLogicalType) {
return of(decimal(
decimalLogicalType.getPrecision(), decimalLogicalType.getScale()));
}
@Override
public Optional<TypeMapping> visit(
LogicalTypeAnnotation.DateLogicalTypeAnnotation dateLogicalType) {
return of(field(new ArrowType.Date(DateUnit.DAY)));
}
@Override
public Optional<TypeMapping> visit(
LogicalTypeAnnotation.TimeLogicalTypeAnnotation timeLogicalType) {
// Only time-millis is a valid INT32 time; micros/nanos live in INT64.
return timeLogicalType.getUnit() == MILLIS
? of(field(new ArrowType.Time(TimeUnit.MILLISECOND, 32)))
: empty();
}
@Override
public Optional<TypeMapping> visit(
LogicalTypeAnnotation.IntLogicalTypeAnnotation intLogicalType) {
// A 64-bit int annotation on INT32 storage is inconsistent; reject it.
if (intLogicalType.getBitWidth() == 64) {
return empty();
}
return of(integer(intLogicalType.getBitWidth(), intLogicalType.isSigned()));
}
})
.orElseThrow(() -> new IllegalArgumentException("illegal type " + type));
}
// INT64 may carry decimal, date, int, time(MICROS/NANOS) or timestamp annotations.
@Override
public TypeMapping convertINT64(PrimitiveTypeName primitiveTypeName) throws RuntimeException {
LogicalTypeAnnotation logicalTypeAnnotation = type.getLogicalTypeAnnotation();
if (logicalTypeAnnotation == null) {
return integer(64, true);
}
return logicalTypeAnnotation
.accept(new LogicalTypeAnnotation.LogicalTypeAnnotationVisitor<TypeMapping>() {
@Override
public Optional<TypeMapping> visit(
LogicalTypeAnnotation.DateLogicalTypeAnnotation dateLogicalType) {
return of(field(new ArrowType.Date(DateUnit.DAY)));
}
@Override
public Optional<TypeMapping> visit(
LogicalTypeAnnotation.DecimalLogicalTypeAnnotation decimalLogicalType) {
return of(decimal(
decimalLogicalType.getPrecision(), decimalLogicalType.getScale()));
}
@Override
public Optional<TypeMapping> visit(
LogicalTypeAnnotation.IntLogicalTypeAnnotation intLogicalType) {
return of(integer(intLogicalType.getBitWidth(), intLogicalType.isSigned()));
}
@Override
public Optional<TypeMapping> visit(
LogicalTypeAnnotation.TimeLogicalTypeAnnotation timeLogicalType) {
// Millis-resolution time is INT32-only, hence not handled here.
if (timeLogicalType.getUnit() == MICROS) {
return of(field(new ArrowType.Time(TimeUnit.MICROSECOND, 64)));
} else if (timeLogicalType.getUnit() == NANOS) {
return of(field(new ArrowType.Time(TimeUnit.NANOSECOND, 64)));
}
return empty();
}
@Override
public Optional<TypeMapping> visit(
LogicalTypeAnnotation.TimestampLogicalTypeAnnotation timestampLogicalType) {
switch (timestampLogicalType.getUnit()) {
case MICROS:
return of(field(new ArrowType.Timestamp(
TimeUnit.MICROSECOND, getTimeZone(timestampLogicalType))));
case MILLIS:
return of(field(new ArrowType.Timestamp(
TimeUnit.MILLIS, getTimeZone(timestampLogicalType))));
case NANOS:
return of(field(new ArrowType.Timestamp(
TimeUnit.NANOSECOND, getTimeZone(timestampLogicalType))));
}
return empty();
}
// UTC-adjusted Parquet timestamps become zoned ("UTC") Arrow timestamps;
// local (unadjusted) ones become zone-less Arrow timestamps.
private String getTimeZone(
LogicalTypeAnnotation.TimestampLogicalTypeAnnotation timestampLogicalType) {
return timestampLogicalType.isAdjustedToUTC() ? "UTC" : null;
}
})
.orElseThrow(() -> new IllegalArgumentException("illegal type " + type));
}
// INT96 is the legacy Impala/Hive timestamp encoding; the converter flag decides
// whether it surfaces as a nanosecond timestamp or as opaque binary.
@Override
public TypeMapping convertINT96(PrimitiveTypeName primitiveTypeName) throws RuntimeException {
if (convertInt96ToArrowTimestamp) {
return field(new ArrowType.Timestamp(TimeUnit.NANOSECOND, null));
} else {
return field(new ArrowType.Binary());
}
}
// Fixed-length binary: plain binary unless annotated as a decimal.
@Override
public TypeMapping convertFIXED_LEN_BYTE_ARRAY(PrimitiveTypeName primitiveTypeName)
throws RuntimeException {
LogicalTypeAnnotation logicalTypeAnnotation = type.getLogicalTypeAnnotation();
if (logicalTypeAnnotation == null) {
return field(new ArrowType.Binary());
}
return logicalTypeAnnotation
.accept(new LogicalTypeAnnotation.LogicalTypeAnnotationVisitor<TypeMapping>() {
@Override
public Optional<TypeMapping> visit(
LogicalTypeAnnotation.DecimalLogicalTypeAnnotation decimalLogicalType) {
return of(decimal(
decimalLogicalType.getPrecision(), decimalLogicalType.getScale()));
}
})
.orElseThrow(() -> new IllegalArgumentException("illegal type " + type));
}
@Override
public TypeMapping convertBOOLEAN(PrimitiveTypeName primitiveTypeName) throws RuntimeException {
return field(new ArrowType.Bool());
}
// BINARY: UTF-8 string or decimal when annotated, plain binary otherwise.
@Override
public TypeMapping convertBINARY(PrimitiveTypeName primitiveTypeName) throws RuntimeException {
LogicalTypeAnnotation logicalTypeAnnotation = type.getLogicalTypeAnnotation();
if (logicalTypeAnnotation == null) {
return field(new ArrowType.Binary());
}
return logicalTypeAnnotation
.accept(new LogicalTypeAnnotation.LogicalTypeAnnotationVisitor<TypeMapping>() {
@Override
public Optional<TypeMapping> visit(
LogicalTypeAnnotation.StringLogicalTypeAnnotation stringLogicalType) {
return of(field(new ArrowType.Utf8()));
}
@Override
public Optional<TypeMapping> visit(
LogicalTypeAnnotation.DecimalLogicalTypeAnnotation decimalLogicalType) {
return of(decimal(
decimalLogicalType.getPrecision(), decimalLogicalType.getScale()));
}
})
.orElseThrow(() -> new IllegalArgumentException("illegal type " + type));
}
// Helpers shared by the converters above.
private TypeMapping decimal(int precision, int scale) {
return field(new ArrowType.Decimal(precision, scale));
}
private TypeMapping integer(int width, boolean signed) {
return field(new ArrowType.Int(width, signed));
}
});
}
/**
 * Maps an existing Arrow schema onto a Parquet message type, pairing top-level
 * fields positionally. For now this does not validate primitive-type compatibility.
 *
 * @param arrowSchema an Arrow schema
 * @param parquetSchema a Parquet message type
 * @return the mapping between the two schemas
 */
public SchemaMapping map(Schema arrowSchema, MessageType parquetSchema) {
    // Pair up the top-level fields positionally, then wrap them in a SchemaMapping.
    final List<TypeMapping> fieldMappings = map(arrowSchema.getFields(), parquetSchema.getFields());
    return new SchemaMapping(arrowSchema, parquetSchema, fieldMappings);
}
// Maps two positionally-aligned field lists; both lists must be the same length.
private List<TypeMapping> map(List<Field> arrowFields, List<Type> parquetFields) {
    if (arrowFields.size() != parquetFields.size()) {
        throw new IllegalArgumentException(
                "Can not map schemas as sizes differ: " + arrowFields + " != " + parquetFields);
    }
    final int fieldCount = arrowFields.size();
    final List<TypeMapping> mappings = new ArrayList<>(fieldCount);
    for (int index = 0; index < fieldCount; index++) {
        mappings.add(map(arrowFields.get(index), parquetFields.get(index)));
    }
    return mappings;
}
/**
 * Maps a single Arrow field onto the given Parquet type.
 *
 * Dispatches on the Arrow type: scalar types must pair with a Parquet primitive,
 * structs/unions map their children pairwise, and lists/maps are resolved through
 * the Parquet 3-level encodings (List3Levels / Map3Levels), with a fallback to the
 * legacy repeated-field list encoding.
 *
 * Fix: the child-count validations in createListTypeMapping and visit(Map) were
 * performed twice on the same immutable field (once at entry, once again after
 * building the 3-level wrapper); the second check of each pair was dead code and
 * has been removed.
 *
 * @param arrowField the Arrow field to map
 * @param parquetField the Parquet type it should correspond to
 * @return the mapping between the two
 * @throws IllegalArgumentException if the two types are structurally incompatible
 */
private TypeMapping map(final Field arrowField, final Type parquetField) {
    return arrowField.getType().accept(new ArrowTypeVisitor<TypeMapping>() {
        @Override
        public TypeMapping visit(Null type) {
            // A null-typed Arrow field can only map to an OPTIONAL Parquet field.
            if (!parquetField.isRepetition(OPTIONAL)) {
                throw new IllegalArgumentException("Parquet type can't be null: " + parquetField);
            }
            return primitive();
        }
        @Override
        public TypeMapping visit(Struct type) {
            if (parquetField.isPrimitive()) {
                throw new IllegalArgumentException("Parquet type not a group: " + parquetField);
            }
            GroupType groupType = parquetField.asGroupType();
            return new StructTypeMapping(
                    arrowField, groupType, map(arrowField.getChildren(), groupType.getFields()));
        }
        @Override
        public TypeMapping visit(org.apache.arrow.vector.types.pojo.ArrowType.List type) {
            return createListTypeMapping(type);
        }
        @Override
        public TypeMapping visit(ArrowType.LargeList largeList) {
            return createListTypeMapping(largeList);
        }
        @Override
        public TypeMapping visit(org.apache.arrow.vector.types.pojo.ArrowType.FixedSizeList type) {
            return createListTypeMapping(type);
        }
        @Override
        public TypeMapping visit(ArrowType.ListView type) {
            return createListTypeMapping(type);
        }
        // Shared mapping logic for every Arrow list flavor.
        private TypeMapping createListTypeMapping(ArrowType.ComplexType type) {
            // An Arrow list always has exactly one child: the element type.
            if (arrowField.getChildren().size() != 1) {
                throw new IllegalArgumentException("Invalid list type: " + type);
            }
            Field arrowChild = arrowField.getChildren().get(0);
            if (parquetField.isRepetition(REPEATED)) {
                // Legacy 1-level list encoding: the repeated field is the element itself.
                return new RepeatedTypeMapping(arrowField, parquetField, map(arrowChild, parquetField));
            }
            if (parquetField.isPrimitive()) {
                throw new IllegalArgumentException("Parquet type not a group: " + parquetField);
            }
            // Standard 3-level list encoding (list group -> repeated group -> element).
            List3Levels list3Levels = new List3Levels(parquetField.asGroupType());
            return new ListTypeMapping(arrowField, list3Levels, map(arrowChild, list3Levels.getElement()));
        }
        @Override
        public TypeMapping visit(Union type) {
            if (parquetField.isPrimitive()) {
                throw new IllegalArgumentException("Parquet type not a group: " + parquetField);
            }
            GroupType groupType = parquetField.asGroupType();
            return new UnionTypeMapping(
                    arrowField, groupType, map(arrowField.getChildren(), groupType.getFields()));
        }
        @Override
        public TypeMapping visit(ArrowType.Map map) {
            // An Arrow map always has exactly two children: the key and the value.
            if (arrowField.getChildren().size() != 2) {
                throw new IllegalArgumentException("Invalid map type: " + map);
            }
            if (parquetField.isPrimitive()) {
                throw new IllegalArgumentException("Parquet type not a group: " + parquetField);
            }
            Map3Levels map3levels = new Map3Levels(parquetField.asGroupType());
            Field keyChild = arrowField.getChildren().get(0);
            Field valueChild = arrowField.getChildren().get(1);
            return new SchemaMapping.MapTypeMapping(
                    arrowField,
                    map3levels,
                    map(keyChild, map3levels.getKey()),
                    map(valueChild, map3levels.getValue()));
        }
        // Every scalar Arrow type below maps 1:1 onto a Parquet primitive.
        @Override
        public TypeMapping visit(Int type) {
            return primitive();
        }
        @Override
        public TypeMapping visit(FloatingPoint type) {
            return primitive();
        }
        @Override
        public TypeMapping visit(Utf8 type) {
            return primitive();
        }
        @Override
        public TypeMapping visit(ArrowType.LargeUtf8 largeUtf8) {
            return primitive();
        }
        @Override
        public TypeMapping visit(ArrowType.Utf8View type) {
            return primitive();
        }
        @Override
        public TypeMapping visit(Binary type) {
            return primitive();
        }
        @Override
        public TypeMapping visit(ArrowType.LargeBinary largeBinary) {
            return primitive();
        }
        @Override
        public TypeMapping visit(ArrowType.BinaryView type) {
            return primitive();
        }
        @Override
        public TypeMapping visit(Bool type) {
            return primitive();
        }
        @Override
        public TypeMapping visit(Decimal type) {
            return primitive();
        }
        @Override
        public TypeMapping visit(Date type) {
            return primitive();
        }
        @Override
        public TypeMapping visit(Time type) {
            return primitive();
        }
        @Override
        public TypeMapping visit(Timestamp type) {
            return primitive();
        }
        @Override
        public TypeMapping visit(Interval type) {
            return primitive();
        }
        @Override
        public TypeMapping visit(ArrowType.Duration duration) {
            return primitive();
        }
        @Override
        public TypeMapping visit(ArrowType.FixedSizeBinary fixedSizeBinary) {
            return primitive();
        }
        private TypeMapping primitive() {
            if (!parquetField.isPrimitive()) {
                throw new IllegalArgumentException("Can not map schemas as one is primitive and the other is not: "
                        + arrowField + " != " + parquetField);
            }
            return new PrimitiveTypeMapping(arrowField, parquetField.asPrimitiveType());
        }
    });
}
}
|
apache/pulsar | 34,963 | tests/integration/src/test/java/org/apache/pulsar/tests/integration/topologies/PulsarCluster.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.tests.integration.topologies;
import static com.google.common.base.Preconditions.checkArgument;
import static org.apache.pulsar.tests.integration.containers.PulsarContainer.BROKER_HTTPS_PORT;
import static org.apache.pulsar.tests.integration.containers.PulsarContainer.BROKER_HTTP_PORT;
import static org.apache.pulsar.tests.integration.containers.PulsarContainer.BROKER_PORT_TLS;
import static org.apache.pulsar.tests.integration.containers.PulsarContainer.CS_PORT;
import static org.apache.pulsar.tests.integration.containers.PulsarContainer.PULSAR_CONTAINERS_LEAVE_RUNNING;
import static org.apache.pulsar.tests.integration.containers.PulsarContainer.ZK_PORT;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.function.Consumer;
import java.util.function.Function;
import lombok.Cleanup;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.IOUtils;
import org.apache.pulsar.client.impl.auth.AuthenticationTls;
import org.apache.pulsar.tests.integration.containers.BKContainer;
import org.apache.pulsar.tests.integration.containers.BrokerContainer;
import org.apache.pulsar.tests.integration.containers.CSContainer;
import org.apache.pulsar.tests.integration.containers.ProxyContainer;
import org.apache.pulsar.tests.integration.containers.PulsarContainer;
import org.apache.pulsar.tests.integration.containers.PulsarInitMetadataContainer;
import org.apache.pulsar.tests.integration.containers.WorkerContainer;
import org.apache.pulsar.tests.integration.containers.ZKContainer;
import org.apache.pulsar.tests.integration.docker.ContainerExecResult;
import org.apache.pulsar.tests.integration.oxia.OxiaContainer;
import org.testcontainers.containers.BindMode;
import org.testcontainers.containers.GenericContainer;
import org.testcontainers.containers.Network;
/**
* Pulsar Cluster in containers.
*/
@Slf4j
public class PulsarCluster {
// Absolute paths of the Pulsar CLI tools inside the containers.
public static final String ADMIN_SCRIPT = "/pulsar/bin/pulsar-admin";
public static final String CLIENT_SCRIPT = "/pulsar/bin/pulsar-client";
public static final String PULSAR_COMMAND_SCRIPT = "/pulsar/bin/pulsar";
// curl binary inside the containers, used for raw HTTP probes.
public static final String CURL = "/usr/bin/curl";
/**
 * Builds a Pulsar cluster from the given spec, creating a dedicated docker
 * network for it.
 *
 * @param spec pulsar cluster spec.
 * @return the built pulsar cluster
 */
public static PulsarCluster forSpec(PulsarClusterSpec spec) {
// A fresh docker network is created for this cluster.
return forSpec(spec, Network.newNetwork());
}
/**
 * Builds a Pulsar cluster from the given spec on an existing docker network.
 *
 * @param spec pulsar cluster spec
 * @param network the docker network to attach all containers to; must not be null
 * @return the built pulsar cluster
 */
public static PulsarCluster forSpec(PulsarClusterSpec spec, Network network) {
    checkArgument(network != null, "Network should not be null");
    // With Oxia as the metadata store there is no separate configuration-store container.
    CSContainer configStore = null;
    if (!spec.enableOxia) {
        configStore = new CSContainer(spec.clusterName)
                .withNetwork(network)
                .withNetworkAliases(CSContainer.NAME);
    }
    return new PulsarCluster(spec, network, configStore, false);
}
// Builds a cluster that reuses an existing configuration-store container.
// The store is marked shared so stop() will not shut it down.
public static PulsarCluster forSpec(PulsarClusterSpec spec, CSContainer csContainer) {
return new PulsarCluster(spec, csContainer.getNetwork(), csContainer, true);
}
@Getter
private final PulsarClusterSpec spec;
// When true (the default), stop() also closes the docker network.
public boolean closeNetworkOnExit = true;
@Getter
private final String clusterName;
private final Network network;
// Exactly one of zkContainer/oxiaContainer is non-null, depending on spec.enableOxia.
private final ZKContainer zkContainer;
private final OxiaContainer oxiaContainer;
// Configuration store; may be shared across clusters (see sharedCsContainer).
private final CSContainer csContainer;
private final boolean sharedCsContainer;
// Containers keyed by their generated names ("pulsar-<service>-<index>").
private final Map<String, BKContainer> bookieContainers;
private final Map<String, BrokerContainer> brokerContainers;
private final Map<String, WorkerContainer> workerContainers;
private final ProxyContainer proxyContainer;
// Populated from the spec in start(boolean).
private Map<String, GenericContainer<?>> externalServices = Collections.emptyMap();
private Map<String, Map<String, String>> externalServiceEnvs;
private final Map<String, String> functionWorkerEnvs;
private final List<Integer> functionWorkerAdditionalPorts;
private final String metadataStoreUrl;
private final String configurationMetadataStoreUrl;
/**
 * Wires up (but does not start) every container of the cluster described by the spec.
 *
 * @param spec the cluster spec (container counts, TLS, env overrides, ...)
 * @param network docker network to attach containers to; when null, falls back to
 *        the csContainer's network or a brand-new network
 * @param csContainer configuration-store container, or null when Oxia is used
 * @param sharedCsContainer when true the configuration store is shared with other
 *        clusters and is not stopped by stop()
 */
private PulsarCluster(PulsarClusterSpec spec, Network network, CSContainer csContainer, boolean sharedCsContainer) {
this.spec = spec;
this.sharedCsContainer = sharedCsContainer;
this.clusterName = spec.clusterName();
if (network != null) {
this.network = network;
} else if (csContainer != null) {
this.network = csContainer.getNetwork();
} else {
this.network = Network.newNetwork();
}
// NOTE(review): the containers below attach to the 'network' parameter rather than
// this.network — confirm callers never pass network == null together with a
// non-null csContainer, or the containers would get a null network.
// Metadata store: either Oxia, or ZooKeeper plus a separate configuration store.
if (spec.enableOxia) {
this.zkContainer = null;
this.oxiaContainer = new OxiaContainer(clusterName);
this.oxiaContainer
.withNetwork(network)
.withNetworkAliases(appendClusterName(OxiaContainer.NAME));
metadataStoreUrl = "oxia://" + oxiaContainer.getServiceAddress();
configurationMetadataStoreUrl = metadataStoreUrl;
} else {
this.oxiaContainer = null;
this.zkContainer = new ZKContainer(clusterName);
this.zkContainer
.withNetwork(network)
.withNetworkAliases(appendClusterName(ZKContainer.NAME))
.withEnv("clusterName", clusterName)
.withEnv("zkServers", appendClusterName(ZKContainer.NAME))
.withEnv("configurationStore", CSContainer.NAME + ":" + CS_PORT)
.withEnv("forceSync", "no")
.withEnv("pulsarNode", appendClusterName("pulsar-broker-0"));
metadataStoreUrl = appendClusterName(ZKContainer.NAME);
configurationMetadataStoreUrl = CSContainer.NAME + ":" + CS_PORT;
zkContainer.setEnableAsyncProfiler(spec.profileZookeeper);
}
this.csContainer = csContainer;
this.bookieContainers = Maps.newTreeMap();
this.brokerContainers = Maps.newTreeMap();
this.workerContainers = Maps.newTreeMap();
// Proxy fronting the brokers; TLS settings are layered on below when enabled.
this.proxyContainer = new ProxyContainer(clusterName, appendClusterName(ProxyContainer.NAME), spec.enableTls)
.withNetwork(network)
.withNetworkAliases(appendClusterName("pulsar-proxy"))
.withEnv("metadataStoreUrl", metadataStoreUrl)
.withEnv("configurationMetadataStoreUrl", configurationMetadataStoreUrl)
.withEnv("clusterName", clusterName);
proxyContainer.setEnableAsyncProfiler(spec.profileProxy);
// enable mTLS
if (spec.enableTls) {
proxyContainer
.withEnv("webServicePortTls", String.valueOf(BROKER_HTTPS_PORT))
.withEnv("servicePortTls", String.valueOf(BROKER_PORT_TLS))
.withEnv("forwardAuthorizationCredentials", "true")
.withEnv("tlsRequireTrustedClientCertOnConnect", "true")
.withEnv("tlsAllowInsecureConnection", "false")
.withEnv("tlsCertificateFilePath", "/pulsar/certificate-authority/server-keys/proxy.cert.pem")
.withEnv("tlsKeyFilePath", "/pulsar/certificate-authority/server-keys/proxy.key-pk8.pem")
.withEnv("tlsTrustCertsFilePath", "/pulsar/certificate-authority/certs/ca.cert.pem")
.withEnv("brokerClientAuthenticationPlugin", AuthenticationTls.class.getName())
.withEnv("brokerClientAuthenticationParameters", String.format("tlsCertFile:%s,tlsKeyFile:%s",
"/pulsar/certificate-authority/client-keys/admin.cert.pem",
"/pulsar/certificate-authority/client-keys/admin.key-pk8.pem"))
.withEnv("tlsEnabledWithBroker", "true")
.withEnv("brokerClientTrustCertsFilePath", "/pulsar/certificate-authority/certs/ca.cert.pem")
.withEnv("brokerClientCertificateFilePath",
"/pulsar/certificate-authority/server-keys/proxy.cert.pem")
.withEnv("brokerClientKeyFilePath", "/pulsar/certificate-authority/server-keys/proxy.key-pk8.pem");
}
// Per-spec proxy customizations.
if (spec.proxyEnvs != null) {
spec.proxyEnvs.forEach(this.proxyContainer::withEnv);
}
if (spec.proxyMountFiles != null) {
spec.proxyMountFiles.forEach(this.proxyContainer::withFileSystemBind);
}
if (spec.proxyAdditionalPorts != null) {
spec.proxyAdditionalPorts.forEach(this.proxyContainer::addExposedPort);
}
// create bookies
bookieContainers.putAll(
runNumContainers("bookie", spec.numBookies(), (name) -> {
BKContainer bookieContainer = new BKContainer(clusterName, name)
.withNetwork(network)
.withNetworkAliases(appendClusterName(name))
.withEnv("metadataServiceUri", "metadata-store:" + metadataStoreUrl)
.withEnv("useHostNameAsBookieID", "true")
// Disable fsyncs for tests since they're slow within the containers
.withEnv("journalSyncData", "false")
.withEnv("journalMaxGroupWaitMSec", "0")
.withEnv("clusterName", clusterName)
.withEnv("PULSAR_PREFIX_diskUsageWarnThreshold", "0.95")
.withEnv("diskUsageThreshold", "0.99")
.withEnv("PULSAR_PREFIX_diskUsageLwmThreshold", "0.97")
.withEnv("nettyMaxFrameSizeBytes", String.valueOf(spec.maxMessageSize))
.withEnv("ledgerDirectories", "data/bookkeeper/" + name + "/ledgers")
.withEnv("journalDirectory", "data/bookkeeper/" + name + "/journal");
if (spec.bookkeeperEnvs != null) {
bookieContainer.withEnv(spec.bookkeeperEnvs);
}
if (spec.bookieAdditionalPorts != null) {
spec.bookieAdditionalPorts.forEach(bookieContainer::addExposedPort);
}
bookieContainer.setEnableAsyncProfiler(spec.profileBookie);
return bookieContainer;
})
);
// create brokers
brokerContainers.putAll(
runNumContainers("broker", spec.numBrokers(), (name) -> {
BrokerContainer brokerContainer =
new BrokerContainer(clusterName, appendClusterName(name), spec.enableTls)
.withNetwork(network)
.withNetworkAliases(appendClusterName(name))
.withEnv("metadataStoreUrl", metadataStoreUrl)
.withEnv("configurationMetadataStoreUrl", configurationMetadataStoreUrl)
.withEnv("clusterName", clusterName)
.withEnv("brokerServiceCompactionMonitorIntervalInSeconds", "1")
.withEnv("loadBalancerOverrideBrokerNicSpeedGbps", "1")
// used in s3 tests
.withEnv("AWS_ACCESS_KEY_ID", "accesskey").withEnv("AWS_SECRET_KEY",
"secretkey")
.withEnv("maxMessageSize", "" + spec.maxMessageSize);
if (spec.enableTls) {
// enable mTLS
brokerContainer
.withEnv("webServicePortTls", String.valueOf(BROKER_HTTPS_PORT))
.withEnv("brokerServicePortTls", String.valueOf(BROKER_PORT_TLS))
.withEnv("authenticateOriginalAuthData", "true")
.withEnv("tlsAllowInsecureConnection", "false")
.withEnv("tlsRequireTrustedClientCertOnConnect", "true")
.withEnv("tlsTrustCertsFilePath", "/pulsar/certificate-authority/certs/ca"
+ ".cert.pem")
.withEnv("tlsCertificateFilePath",
"/pulsar/certificate-authority/server-keys/broker.cert.pem")
.withEnv("tlsKeyFilePath",
"/pulsar/certificate-authority/server-keys/broker.key-pk8.pem");
}
if (spec.queryLastMessage) {
brokerContainer.withEnv("bookkeeperExplicitLacIntervalInMills", "10");
brokerContainer.withEnv("bookkeeperUseV2WireProtocol", "false");
}
if (spec.brokerEnvs != null) {
brokerContainer.withEnv(spec.brokerEnvs);
}
if (spec.brokerMountFiles != null) {
spec.brokerMountFiles.forEach(brokerContainer::withFileSystemBind);
}
if (spec.brokerAdditionalPorts() != null) {
spec.brokerAdditionalPorts().forEach(brokerContainer::addExposedPort);
}
brokerContainer.setEnableAsyncProfiler(spec.profileBroker);
return brokerContainer;
}
));
// Mount shared data volumes / classpath resources into every container.
// Note: workerContainers is still empty here, so workers only pick these up
// if they are added before this constructor runs (they are not) — the two
// worker forEach calls below are effectively no-ops at construction time.
if (spec.dataContainer != null) {
if (!sharedCsContainer && csContainer != null) {
csContainer.withVolumesFrom(spec.dataContainer, BindMode.READ_WRITE);
}
if (zkContainer != null) {
zkContainer.withVolumesFrom(spec.dataContainer, BindMode.READ_WRITE);
}
proxyContainer.withVolumesFrom(spec.dataContainer, BindMode.READ_WRITE);
bookieContainers.values().forEach(c -> c.withVolumesFrom(spec.dataContainer, BindMode.READ_WRITE));
brokerContainers.values().forEach(c -> c.withVolumesFrom(spec.dataContainer, BindMode.READ_WRITE));
workerContainers.values().forEach(c -> c.withVolumesFrom(spec.dataContainer, BindMode.READ_WRITE));
}
spec.classPathVolumeMounts.forEach((key, value) -> {
if (zkContainer != null) {
zkContainer.withClasspathResourceMapping(key, value, BindMode.READ_WRITE);
}
if (!sharedCsContainer && csContainer != null) {
csContainer.withClasspathResourceMapping(key, value, BindMode.READ_WRITE);
}
proxyContainer.withClasspathResourceMapping(key, value, BindMode.READ_WRITE);
bookieContainers.values().forEach(c -> c.withClasspathResourceMapping(key, value, BindMode.READ_WRITE));
brokerContainers.values().forEach(c -> c.withClasspathResourceMapping(key, value, BindMode.READ_WRITE));
workerContainers.values().forEach(c -> c.withClasspathResourceMapping(key, value, BindMode.READ_WRITE));
});
functionWorkerEnvs = spec.functionWorkerEnvs;
functionWorkerAdditionalPorts = spec.functionWorkerAdditionalPorts;
}
// Binary (pulsar://) service URL exposed through the proxy.
public String getPlainTextServiceUrl() {
return proxyContainer.getPlainTextServiceUrl();
}
/**
 * Applies the given action to every container of the cluster: the core
 * infrastructure (zk, configuration store, oxia, proxy — whichever exist),
 * then all bookies, brokers, workers and external services.
 */
public void forEachContainer(Consumer<GenericContainer<?>> consumer) {
    for (GenericContainer<?> core : new GenericContainer<?>[] {
            zkContainer, csContainer, oxiaContainer, proxyContainer}) {
        if (core != null) {
            consumer.accept(core);
        }
    }
    bookieContainers.values().forEach(consumer);
    brokerContainers.values().forEach(consumer);
    workerContainers.values().forEach(consumer);
    externalServices.values().forEach(consumer);
}
// HTTP admin/service URL exposed through the proxy.
public String getHttpServiceUrl() {
return proxyContainer.getHttpServiceUrl();
}
// HTTPS / TLS endpoints of a randomly chosen broker (bypassing the proxy).
public String getAnyBrokersHttpsServiceUrl() {
return getAnyBroker().getHttpsServiceUrl();
}
public String getAnyBrokersServiceUrlTls() {
return getAnyBroker().getServiceUrlTls();
}
/**
 * Builds a multi-host HTTP service URL listing every broker's mapped host:port,
 * e.g. {@code http://hostA:8080,hostB:8081}.
 *
 * Fix: replaced repeated String concatenation in a loop (quadratic in the number
 * of brokers, plus manual Iterator bookkeeping for the separator) with a single
 * StringBuilder and a separator variable. Output is byte-identical.
 *
 * @return comma-separated HTTP URL covering all brokers
 */
public String getAllBrokersHttpServiceUrl() {
    StringBuilder multiUrl = new StringBuilder("http://");
    String separator = "";
    for (BrokerContainer broker : getBrokers()) {
        multiUrl.append(separator)
                .append(broker.getHost())
                .append(':')
                .append(broker.getMappedPort(BROKER_HTTP_PORT));
        separator = ",";
    }
    return multiUrl.toString();
}
// Host-side ZooKeeper connection string.
// NOTE(review): throws NPE when the cluster runs with Oxia (zkContainer is null) —
// confirm callers only use this on ZooKeeper-backed clusters.
public String getZKConnString() {
return zkContainer.getHost() + ":" + zkContainer.getMappedPort(ZK_PORT);
}
// Host-side configuration-store connection string (same caveat for Oxia clusters).
public String getCSConnString() {
return csContainer.getHost() + ":" + csContainer.getMappedPort(CS_PORT);
}
public Network getNetwork() {
return network;
}
public Map<String, GenericContainer<?>> getExternalServices() {
return externalServices;
}
// Starts the cluster, including cluster metadata initialization.
public void start() throws Exception {
start(true);
}
/**
 * Starts the whole cluster in dependency order: metadata store, optional metadata
 * initialization, bookies, brokers, proxy, then any configured external services.
 *
 * @param doInit whether to run cluster metadata initialization before starting
 *        bookies and brokers
 * @throws Exception if any container fails to start
 */
public void start(boolean doInit) throws Exception {
if (!spec.enableOxia) {
// start the local zookeeper
zkContainer.start();
log.info("Successfully started local zookeeper container.");
// start the configuration store
if (!sharedCsContainer) {
csContainer.start();
log.info("Successfully started configuration store container.");
}
} else {
oxiaContainer.start();
}
if (doInit) {
// Run cluster metadata initialization
@Cleanup
PulsarInitMetadataContainer init = new PulsarInitMetadataContainer(
network,
clusterName,
metadataStoreUrl,
configurationMetadataStoreUrl,
appendClusterName("pulsar-broker-0")
);
init.initialize();
}
// start bookies
bookieContainers.values().forEach(BKContainer::start);
log.info("Successfully started {} bookie containers.", bookieContainers.size());
// start brokers
this.startAllBrokers();
log.info("Successfully started {} broker containers.", brokerContainers.size());
// create proxy
proxyContainer.start();
log.info("Successfully started pulsar proxy.");
log.info("Pulsar cluster {} is up running:", clusterName);
log.info("\tBinary Service Url : {}", getPlainTextServiceUrl());
log.info("\tHttp Service Url : {}", getHttpServiceUrl());
// start external services
this.externalServices = spec.externalServices;
this.externalServiceEnvs = spec.externalServiceEnvs;
if (null != externalServices) {
// External services start concurrently; each gets its name as network alias.
externalServices.entrySet().parallelStream().forEach(service -> {
GenericContainer<?> serviceContainer = service.getValue();
serviceContainer.withNetwork(network);
serviceContainer.withNetworkAliases(service.getKey());
if (null != externalServiceEnvs && null != externalServiceEnvs.get(service.getKey())) {
Map<String, String> env =
externalServiceEnvs.getOrDefault(service.getKey(), Collections.emptyMap());
serviceContainer.withEnv(env);
}
PulsarContainer.configureLeaveContainerRunning(serviceContainer);
serviceContainer.start();
log.info("Successfully started external service {}.", service.getKey());
});
}
}
/**
 * Attaches the given container to the cluster network under the supplied alias
 * and starts it.
 */
public void startService(String networkAlias,
                         GenericContainer<?> serviceContainer) {
    log.info("Starting external service {} ...", networkAlias);
    serviceContainer.withNetwork(network).withNetworkAliases(networkAlias);
    PulsarContainer.configureLeaveContainerRunning(serviceContainer);
    serviceContainer.start();
    log.info("Successfully start external service {}", networkAlias);
}
/**
 * Stops an externally managed service container, unless the debug flag
 * PULSAR_CONTAINERS_LEAVE_RUNNING asks to keep containers alive.
 */
public static void stopService(String networkAlias,
                               GenericContainer<?> serviceContainer) {
    if (!PULSAR_CONTAINERS_LEAVE_RUNNING) {
        log.info("Stopping external service {} ...", networkAlias);
        serviceContainer.stop();
        log.info("Successfully stop external service {}", networkAlias);
    } else {
        logIgnoringStopDueToLeaveRunning();
    }
}
/**
 * Creates {@code numContainers} containers named "pulsar-&lt;serviceName&gt;-&lt;i&gt;",
 * keyed by name in a sorted map. Containers are created, not started.
 */
private static <T extends PulsarContainer> Map<String, T> runNumContainers(String serviceName,
                                                                           int numContainers,
                                                                           Function<String, T> containerCreator) {
    Map<String, T> containers = Maps.newTreeMap();
    for (int idx = 0; idx < numContainers; idx++) {
        String containerName = "pulsar-" + serviceName + "-" + idx;
        containers.put(containerName, containerCreator.apply(containerName));
    }
    return containers;
}
/**
 * Stops the whole cluster in reverse dependency order: workers, external services,
 * proxy, brokers, bookies, configuration store, metadata store — then closes the
 * network (unless closeNetworkOnExit is false). No-op when
 * PULSAR_CONTAINERS_LEAVE_RUNNING is set.
 */
public synchronized void stop() {
if (PULSAR_CONTAINERS_LEAVE_RUNNING) {
logIgnoringStopDueToLeaveRunning();
return;
}
stopInParallel(workerContainers.values());
if (externalServices != null) {
stopInParallel(externalServices.values());
}
if (null != proxyContainer) {
proxyContainer.stop();
}
stopInParallel(brokerContainers.values());
stopInParallel(bookieContainers.values());
// A shared configuration store may still be used by other clusters — leave it running.
if (!sharedCsContainer && null != csContainer) {
csContainer.stop();
}
if (null != zkContainer) {
zkContainer.stop();
}
if (oxiaContainer != null) {
oxiaContainer.stop();
}
if (closeNetworkOnExit) {
try {
network.close();
} catch (Exception e) {
log.info("Failed to shutdown network for pulsar cluster {}", clusterName, e);
}
}
}
// Stops all given containers concurrently, skipping null entries.
private static void stopInParallel(Collection<? extends GenericContainer<?>> containers) {
    containers.parallelStream()
            .filter(container -> container != null)
            .forEach(container -> container.stop());
}
/**
 * Creates and starts {@code numFunctionWorkers} function workers using the
 * requested runtime type (thread or process based).
 */
public synchronized void setupFunctionWorkers(String suffix, FunctionRuntimeType runtimeType,
                                              int numFunctionWorkers) {
    if (runtimeType == FunctionRuntimeType.THREAD) {
        startFunctionWorkersWithThreadContainerFactory(suffix, numFunctionWorkers);
    } else if (runtimeType == FunctionRuntimeType.PROCESS) {
        startFunctionWorkersWithProcessContainerFactory(suffix, numFunctionWorkers);
    }
}
// Process-runtime workers: one container per worker, then start them all.
private void startFunctionWorkersWithProcessContainerFactory(String suffix, int numFunctionWorkers) {
    Map<String, WorkerContainer> created = runNumContainers(
            "functions-worker-process-" + suffix,
            numFunctionWorkers,
            this::createWorkerContainer);
    workerContainers.putAll(created);
    this.startWorkers();
}
// Builds (but does not start) a function-worker container pointed at broker 0.
// NOTE(review): the service URLs hard-code "pulsar-broker-0" without the
// cluster-name suffix used elsewhere (appendClusterName) — confirm this matches
// the brokers' network aliases in multi-cluster setups.
private WorkerContainer createWorkerContainer(String name) {
String serviceUrl = "pulsar://pulsar-broker-0:" + PulsarContainer.BROKER_PORT;
String httpServiceUrl = "http://pulsar-broker-0:" + PulsarContainer.BROKER_HTTP_PORT;
WorkerContainer workerContainer = new WorkerContainer(clusterName, name)
.withNetwork(network)
.withNetworkAliases(name)
// worker settings
.withEnv("PF_workerId", name)
.withEnv("PF_workerHostname", name)
.withEnv("PF_workerPort", "" + BROKER_HTTP_PORT)
.withEnv("PF_pulsarFunctionsCluster", clusterName)
.withEnv("PF_pulsarServiceUrl", serviceUrl)
.withEnv("PF_pulsarWebServiceUrl", httpServiceUrl)
// script
.withEnv("clusterName", clusterName)
.withEnv("zookeeperServers", ZKContainer.NAME)
// bookkeeper tools
.withEnv("zkServers", ZKContainer.NAME)
.withEnv(functionWorkerEnvs)
.withExposedPorts(functionWorkerAdditionalPorts.toArray(new Integer[0]));
workerContainer.setEnableAsyncProfiler(spec.profileFunctionWorker);
return workerContainer;
}
// Thread-runtime workers: same containers as the process runtime, with the
// thread runtime factory selected via environment overrides.
private void startFunctionWorkersWithThreadContainerFactory(String suffix, int numFunctionWorkers) {
    Map<String, WorkerContainer> created = runNumContainers(
            "functions-worker-thread-" + suffix,
            numFunctionWorkers,
            name -> {
                WorkerContainer worker = createWorkerContainer(name);
                worker.withEnv("PF_functionRuntimeFactoryClassName",
                        "org.apache.pulsar.functions.runtime.thread.ThreadRuntimeFactory");
                worker.withEnv("PF_functionRuntimeFactoryConfigs_threadGroupName", "pf-container-group");
                return worker;
            });
    workerContainers.putAll(created);
    this.startWorkers();
}
// Starts every initialized worker container concurrently.
public synchronized void startWorkers() {
    workerContainers.values().parallelStream().forEach(worker -> worker.start());
    log.info("Successfully started {} worker containers.", workerContainers.size());
}
/**
 * Stops the named worker and removes it from the worker map. Logs a warning and
 * returns when the name is unknown; no-op when PULSAR_CONTAINERS_LEAVE_RUNNING is set.
 */
public synchronized void stopWorker(String workerName) {
    if (PULSAR_CONTAINERS_LEAVE_RUNNING) {
        logIgnoringStopDueToLeaveRunning();
        return;
    }
    WorkerContainer target = workerContainers.get(workerName);
    if (target == null) {
        log.warn("Failed to find the worker to stop ({})", workerName);
        return;
    }
    // Stop first, only then drop it from the map (so a failed stop keeps it tracked).
    target.stop();
    workerContainers.remove(workerName);
    log.info("Worker {} stopped and removed from the map of worker containers", workerName);
}
// Stops every initialized worker concurrently, then forgets them all.
public synchronized void stopWorkers() {
    if (PULSAR_CONTAINERS_LEAVE_RUNNING) {
        logIgnoringStopDueToLeaveRunning();
        return;
    }
    workerContainers.values().parallelStream().forEach(worker -> worker.stop());
    workerContainers.clear();
}
// Attaches each container to the cluster network under its map key and starts it.
public void startContainers(Map<String, GenericContainer<?>> containers) {
    containers.forEach((alias, container) -> {
        PulsarContainer.configureLeaveContainerRunning(container);
        container.withNetwork(network).withNetworkAliases(alias).start();
        log.info("Successfully start container {}.", alias);
    });
}
/**
 * Stops all given containers in parallel, unless containers are configured to be
 * left running.
 */
public static void stopContainers(Map<String, GenericContainer<?>> containers) {
    if (PULSAR_CONTAINERS_LEAVE_RUNNING) {
        logIgnoringStopDueToLeaveRunning();
        return;
    }
    containers.values().parallelStream().forEach(container -> container.stop());
    log.info("Successfully stop containers : {}", containers);
}
/** Logs that a stop request was skipped because PULSAR_CONTAINERS_LEAVE_RUNNING is set. */
private static void logIgnoringStopDueToLeaveRunning() {
    log.warn("Ignoring stop due to PULSAR_CONTAINERS_LEAVE_RUNNING=true.");
}
/** Returns a randomly chosen live broker container. */
public BrokerContainer getAnyBroker() {
    return getAnyContainer(brokerContainers, "pulsar-broker");
}

/** Returns a randomly chosen live function worker container. */
public synchronized WorkerContainer getAnyWorker() {
    return getAnyContainer(workerContainers, "pulsar-functions-worker");
}
/**
 * Returns a snapshot list of all currently registered function worker containers.
 *
 * <p>NOTE(review): the method name looks like a typo for {@code getAllWorkers}; it is
 * kept as-is because renaming the public method would break existing callers.
 */
public synchronized List<WorkerContainer> getAlWorkers() {
    // Diamond operator instead of the redundant explicit type argument.
    return new ArrayList<>(workerContainers.values());
}
/** Returns the broker container registered under index {@code index}. */
public BrokerContainer getBroker(int index) {
    return getAnyContainer(brokerContainers, "pulsar-broker", index);
}

/** Returns the worker container registered under index {@code index}. */
public synchronized WorkerContainer getWorker(int index) {
    return getAnyContainer(workerContainers, "pulsar-functions-worker", index);
}

/** Returns the worker container registered under {@code workerName}, or null if absent. */
public synchronized WorkerContainer getWorker(String workerName) {
    return workerContainers.get(workerName);
}
/**
 * Returns a uniformly random container from {@code containers}.
 *
 * @param containers name-to-container registry to pick from
 * @param serviceName human-readable service name, used only in the error message
 * @throws IllegalArgumentException if no container of the service is alive
 */
private <T> T getAnyContainer(Map<String, T> containers, String serviceName) {
    // Fail fast before copying/shuffling; the original checked only after doing
    // that work on what might be an empty collection.
    checkArgument(!containers.isEmpty(), "No " + serviceName + " is alive");
    List<T> containerList = new ArrayList<>(containers.values());
    Collections.shuffle(containerList);
    return containerList.get(0);
}
/**
 * Returns the container registered under the key
 * {@code serviceName.toLowerCase() + "-" + index}.
 *
 * <p>NOTE(review): this assumes map keys follow the "&lt;service&gt;-&lt;index&gt;"
 * naming scheme; when names are prefixed with the cluster name (see
 * {@code appendClusterName}) this lookup could return null — confirm against how
 * brokers/workers are registered.
 *
 * @throws IllegalArgumentException if the map is empty or index is outside [0, size)
 */
private <T> T getAnyContainer(Map<String, T> containers, String serviceName, int index) {
    checkArgument(!containers.isEmpty(), "No " + serviceName + " is alive");
    checkArgument((index >= 0 && index < containers.size()), "Index : " + index + " is out range");
    return containers.get(serviceName.toLowerCase() + "-" + index);
}
/** Returns a live view of all broker containers. */
public Collection<BrokerContainer> getBrokers() {
    return brokerContainers.values();
}

/** Returns the cluster's proxy container. */
public ProxyContainer getProxy() {
    return proxyContainer;
}

/** Returns a live view of all bookie containers. */
public Collection<BKContainer> getBookies() {
    return bookieContainers.values();
}

/** Returns the cluster's ZooKeeper container. */
public ZKContainer getZooKeeper() {
    return zkContainer;
}
/** Runs a pulsar-admin command on a randomly chosen broker and returns its result. */
public ContainerExecResult runAdminCommandOnAnyBroker(String... commands) throws Exception {
    return runCommandOnAnyBrokerWithScript(ADMIN_SCRIPT, commands);
}

/** Runs a bin/pulsar command on a randomly chosen broker and returns its result. */
public ContainerExecResult runPulsarBaseCommandOnAnyBroker(String... commands) throws Exception {
    return runCommandOnAnyBrokerWithScript(PULSAR_COMMAND_SCRIPT, commands);
}
/**
 * Executes {@code scriptType} with the given arguments on a randomly chosen broker.
 *
 * @param scriptType the script to run (e.g. the admin or pulsar command script)
 * @param commands arguments passed to the script, in order
 */
private ContainerExecResult runCommandOnAnyBrokerWithScript(String scriptType, String... commands)
        throws Exception {
    // Build the full argv: script name followed by the caller-supplied arguments.
    String[] fullCommand = new String[commands.length + 1];
    fullCommand[0] = scriptType;
    for (int i = 0; i < commands.length; i++) {
        fullCommand[i + 1] = commands[i];
    }
    return getAnyBroker().execCmd(fullCommand);
}
/** Stops every broker container (sequentially; no leave-running guard here). */
public void stopAllBrokers() {
    brokerContainers.values().forEach(BrokerContainer::stop);
}

/** Starts every broker container. */
public void startAllBrokers() {
    brokerContainers.values().forEach(BrokerContainer::start);
}

/** Stops every bookie container. */
public void stopAllBookies() {
    bookieContainers.values().forEach(BKContainer::stop);
}

/** Starts every bookie container. */
public void startAllBookies() {
    bookieContainers.values().forEach(BKContainer::start);
}

/** Stops the ZooKeeper container. */
public void stopZooKeeper() {
    zkContainer.stop();
}

/** Starts the ZooKeeper container. */
public void startZooKeeper() {
    zkContainer.start();
}
/** Creates the namespace {@code public/<nsName>} in this cluster via pulsar-admin. */
public ContainerExecResult createNamespace(String nsName) throws Exception {
    return runAdminCommandOnAnyBroker(
            "namespaces", "create", "public/" + nsName,
            "--clusters", clusterName);
}

/** Creates a partitioned topic with the given number of partitions via pulsar-admin. */
public ContainerExecResult createPartitionedTopic(String topicName, int partitions) throws Exception {
    return runAdminCommandOnAnyBroker(
            "topics", "create-partitioned-topic", topicName,
            "-p", String.valueOf(partitions));
}

/** Enables or disables message deduplication on namespace {@code public/<nsName>}. */
public ContainerExecResult enableDeduplication(String nsName, boolean enabled) throws Exception {
    return runAdminCommandOnAnyBroker(
            "namespaces", "set-deduplication", "public/" + nsName,
            enabled ? "--enable" : "--disable");
}
/**
 * Concatenates the instance-0 log file of function {@code name} from every worker
 * container. Workers where the file cannot be read are logged and skipped.
 */
public String getFunctionLogs(String name) {
    StringBuilder aggregated = new StringBuilder();
    // The log path depends only on the function name, so compute it once.
    String logFile = "/pulsar/logs/functions/public/default/" + name + "/" + name + "-0.log";
    for (WorkerContainer container : getAlWorkers()) {
        try {
            String containerLogs = container.<String>copyFileFromContainer(
                    logFile, inputStream -> IOUtils.toString(inputStream, "utf-8"));
            aggregated.append(containerLogs);
        } catch (Exception e) {
            log.error("Failed to get function logs from container {}", container.getContainerName(), e);
        }
    }
    return aggregated.toString();
}
/**
 * Logs the instance-0 log file of function {@code name} from every worker container.
 * Missing files and other failures are logged and do not abort the loop.
 */
public void dumpFunctionLogs(String name) {
    // The log path depends only on the function name, so compute it once.
    String logFile = "/pulsar/logs/functions/public/default/" + name + "/" + name + "-0.log";
    for (WorkerContainer container : getAlWorkers()) {
        log.info("Trying to get function {} logs from container {}", name, container.getContainerName());
        try {
            String logs = container.<String>copyFileFromContainer(
                    logFile, inputStream -> IOUtils.toString(inputStream, "utf-8"));
            log.info("Function {} logs {}", name, logs);
        } catch (com.github.dockerjava.api.exception.NotFoundException notFound) {
            // The function never ran on this worker; that is expected for some workers.
            log.info("Cannot download {} logs from {} not found exception {}", name,
                    container.getContainerName(), notFound.toString());
        } catch (Throwable err) {
            log.info("Cannot download {} logs from {}", name, container.getContainerName(), err);
        }
    }
}
/**
 * Prefixes {@code name} with the cluster name when a shared cluster-service
 * container is in use; otherwise returns the name unchanged.
 */
private String appendClusterName(String name) {
    if (sharedCsContainer) {
        return clusterName + "-" + name;
    }
    return name;
}
/** Returns a randomly chosen live bookie container. */
public BKContainer getAnyBookie() {
    return getAnyContainer(bookieContainers, "bookie");
}
}
|
apache/uniffle | 35,072 | integration-test/common/src/test/java/org/apache/uniffle/test/RemoteMergeShuffleWithRssClientTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.uniffle.test;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import io.netty.buffer.ByteBuf;
import org.apache.hadoop.io.IntWritable;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Timeout;
import org.junit.jupiter.api.io.TempDir;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import org.roaringbitmap.longlong.Roaring64NavigableMap;
import org.apache.uniffle.client.factory.ShuffleClientFactory;
import org.apache.uniffle.client.impl.ShuffleWriteClientImpl;
import org.apache.uniffle.client.record.Record;
import org.apache.uniffle.client.record.reader.KeyValueReader;
import org.apache.uniffle.client.record.reader.RMRecordsReader;
import org.apache.uniffle.client.record.writer.Combiner;
import org.apache.uniffle.client.record.writer.SumByKeyCombiner;
import org.apache.uniffle.common.PartitionRange;
import org.apache.uniffle.common.RemoteStorageInfo;
import org.apache.uniffle.common.ShuffleBlockInfo;
import org.apache.uniffle.common.ShuffleDataDistributionType;
import org.apache.uniffle.common.ShuffleServerInfo;
import org.apache.uniffle.common.config.RssConf;
import org.apache.uniffle.common.rpc.ServerType;
import org.apache.uniffle.common.serializer.Serializer;
import org.apache.uniffle.common.serializer.SerializerFactory;
import org.apache.uniffle.common.serializer.SerializerInstance;
import org.apache.uniffle.common.serializer.SerializerUtils;
import org.apache.uniffle.common.util.BlockIdLayout;
import org.apache.uniffle.common.util.ChecksumUtils;
import org.apache.uniffle.coordinator.CoordinatorConf;
import org.apache.uniffle.proto.RssProtos;
import org.apache.uniffle.server.ShuffleServerConf;
import org.apache.uniffle.server.buffer.ShuffleBufferType;
import org.apache.uniffle.storage.util.StorageType;
import static org.apache.uniffle.coordinator.CoordinatorConf.COORDINATOR_DYNAMIC_CLIENT_CONF_ENABLED;
import static org.junit.jupiter.api.Assertions.assertEquals;
public class RemoteMergeShuffleWithRssClientTest extends ShuffleReadWriteBase {
  // Shuffle and base partition ids shared by every test in this class.
  private static final int SHUFFLE_ID = 0;
  private static final int PARTITION_ID = 0;
  // Number of records generated per (start, interval) sequence within one block.
  private static final int RECORD_NUMBER = 1009;
  // Single shuffle server shared by all tests; initialized once in setupServers().
  private static ShuffleServerInfo shuffleServerInfo;
  // Per-test write client, created by createClient() and closed after each test.
  private ShuffleWriteClientImpl shuffleWriteClientImpl;
  /**
   * Boots one coordinator and one GRPC_NETTY shuffle server with server-side (remote)
   * merge enabled, then records the server's address for all tests.
   */
  @BeforeAll
  public static void setupServers(@TempDir File tmpDir) throws Exception {
    CoordinatorConf coordinatorConf = coordinatorConfWithoutPort();
    // Dynamic client conf would override the explicit per-test settings below.
    coordinatorConf.setBoolean(COORDINATOR_DYNAMIC_CLIENT_CONF_ENABLED, false);
    storeCoordinatorConf(coordinatorConf);
    ShuffleServerConf shuffleServerConf =
        shuffleServerConfWithoutPort(0, tmpDir, ServerType.GRPC_NETTY);
    // Enable remote merge; a tiny merged-block size forces multi-block merged reads.
    shuffleServerConf.set(ShuffleServerConf.SERVER_MERGE_ENABLE, true);
    shuffleServerConf.set(ShuffleServerConf.SERVER_MERGE_DEFAULT_MERGED_BLOCK_SIZE, "1k");
    shuffleServerConf.set(
        ShuffleServerConf.SERVER_SHUFFLE_BUFFER_TYPE, ShuffleBufferType.SKIP_LIST);
    // Effectively disable app expiry so apps survive the whole test run.
    shuffleServerConf.setLong("rss.server.app.expired.withoutHeartbeat", 10000000);
    shuffleServerConf.setString("rss.storage.type", StorageType.LOCALFILE.name());
    storeShuffleServerConf(shuffleServerConf);
    startServersWithRandomPorts();
    shuffleServerInfo =
        new ShuffleServerInfo(
            LOCALHOST,
            nettyShuffleServers.get(0).getGrpcPort(),
            nettyShuffleServers.get(0).getNettyPort());
  }
  /**
   * Builds the write client used by a single test.
   *
   * @param clientType "GRPC" or "GRPC_NETTY"; selects the transport used for writes
   */
  public void createClient(String clientType) {
    shuffleWriteClientImpl =
        new ShuffleWriteClientImpl(
            ShuffleClientFactory.newWriteBuilder()
                .clientType(clientType)
                .retryMax(3)
                .retryIntervalMax(1000)
                .heartBeatThreadNum(1)
                // Single replica: this suite runs exactly one shuffle server.
                .replica(1)
                .replicaWrite(1)
                .replicaRead(1)
                .replicaSkipEnabled(true)
                .dataTransferPoolSize(1)
                .dataCommitPoolSize(1)
                .unregisterThreadPoolSize(10)
                .unregisterRequestTimeSec(10));
  }
@AfterEach
public void closeClient() {
shuffleWriteClientImpl.close();
}
  /**
   * End-to-end remote-merge test on a single partition: two map tasks write
   * interleaved sorted blocks (keys k, k+5, k+10, ... for k in 0..4), the server
   * merges them, and the reader must observe one fully sorted, gap-free key
   * sequence of 5 * RECORD_NUMBER records.
   *
   * <p>Each parameter string is "keyClass,valueClass,clientType[,raw]".
   */
  @ParameterizedTest
  @ValueSource(
      strings = {
        "org.apache.hadoop.io.Text,org.apache.hadoop.io.IntWritable,GRPC,true",
        "org.apache.hadoop.io.Text,org.apache.hadoop.io.IntWritable,GRPC_NETTY,true",
        "org.apache.hadoop.io.Text,org.apache.hadoop.io.IntWritable,GRPC,false",
        "org.apache.hadoop.io.Text,org.apache.hadoop.io.IntWritable,GRPC_NETTY,false",
        "java.lang.String,java.lang.Integer,GRPC",
        "java.lang.String,java.lang.Integer,GRPC_NETTY",
        "org.apache.uniffle.common.serializer.SerializerUtils$SomeClass,java.lang.Integer,GRPC",
        "org.apache.uniffle.common.serializer.SerializerUtils$SomeClass,java.lang.Integer,GRPC_NETTY",
      })
  @Timeout(10)
  public void remoteMergeWriteReadTest(String classes) throws Exception {
    // 1 basic parameter
    final String[] classArray = classes.split(",");
    final String keyClassName = classArray[0];
    final String valueClassName = classArray[1];
    final Class keyClass = SerializerUtils.getClassByName(keyClassName);
    final Class valueClass = SerializerUtils.getClassByName(valueClassName);
    final String clientType = classArray[2];
    // Optional 4th token: read raw serialized bytes instead of deserialized objects.
    final boolean raw = classArray.length > 3 ? Boolean.parseBoolean(classArray[3]) : false;
    final Comparator comparator = SerializerUtils.getComparator(keyClass);
    final RssConf rssConf = new RssConf();
    // 2 register shuffle
    createClient(clientType);
    String testAppId = "remoteMergeWriteReadTest" + classes;
    shuffleWriteClientImpl.registerShuffle(
        shuffleServerInfo, testAppId, SHUFFLE_ID,
        Lists.newArrayList(new PartitionRange(0, 0)),
        new RemoteStorageInfo(""), ShuffleDataDistributionType.NORMAL, 0,
        RssProtos.MergeContext.newBuilder()
            .setKeyClass(keyClass.getName())
            .setValueClass(valueClass.getName())
            .setComparatorClass(comparator.getClass().getName())
            .setMergedBlockSize(-1)
            .setMergeClassLoader("")
            .build());
    // 3 report shuffle result
    // task 0 attempt 0 generate three blocks
    BlockIdLayout layout = BlockIdLayout.from(rssConf);
    List<ShuffleBlockInfo> blocks1 = new ArrayList<>();
    // Blocks carry keys start, start+5, start+10, ... so the five starts 0..4 interleave.
    blocks1.add(createShuffleBlockForRemoteMerge(rssConf, layout, 0, PARTITION_ID,
        Lists.newArrayList(shuffleServerInfo), keyClass, valueClass, 0, 5, RECORD_NUMBER, 1));
    blocks1.add(createShuffleBlockForRemoteMerge(rssConf, layout, 0, PARTITION_ID,
        Lists.newArrayList(shuffleServerInfo), keyClass, valueClass, 2, 5, RECORD_NUMBER, 1));
    blocks1.add(createShuffleBlockForRemoteMerge(rssConf, layout, 0, PARTITION_ID,
        Lists.newArrayList(shuffleServerInfo), keyClass, valueClass, 4, 5, RECORD_NUMBER, 1));
    shuffleWriteClientImpl.sendShuffleData(testAppId, blocks1, () -> false);
    // task 1 attempt 0 generate two blocks
    List<ShuffleBlockInfo> blocks2 = new ArrayList<>();
    blocks2.add(createShuffleBlockForRemoteMerge(rssConf, layout, 1, PARTITION_ID,
        Lists.newArrayList(shuffleServerInfo), keyClass, valueClass, 1, 5, RECORD_NUMBER, 1));
    blocks2.add(createShuffleBlockForRemoteMerge(rssConf, layout, 1, PARTITION_ID,
        Lists.newArrayList(shuffleServerInfo), keyClass, valueClass, 3, 5, RECORD_NUMBER, 1));
    shuffleWriteClientImpl.sendShuffleData(testAppId, blocks2, () -> false);
    // NOTE(review): partitionToServers is never used in this test.
    Map<Integer, List<ShuffleServerInfo>> partitionToServers =
        ImmutableMap.of(PARTITION_ID, Lists.newArrayList(shuffleServerInfo));
    // 4 report shuffle result
    Map<Integer, Set<Long>> ptb = ImmutableMap.of(PARTITION_ID, new HashSet());
    ptb.get(PARTITION_ID)
        .addAll(blocks1.stream().map(s -> s.getBlockId()).collect(Collectors.toList()));
    ptb.get(PARTITION_ID)
        .addAll(blocks2.stream().map(s -> s.getBlockId()).collect(Collectors.toList()));
    Map<ShuffleServerInfo, Map<Integer, Set<Long>>> serverToPartitionToBlockIds = new HashMap();
    serverToPartitionToBlockIds.put(shuffleServerInfo, ptb);
    shuffleWriteClientImpl.reportShuffleResult(
        serverToPartitionToBlockIds, testAppId, SHUFFLE_ID, 0, 1);
    shuffleWriteClientImpl.reportShuffleResult(
        serverToPartitionToBlockIds, testAppId, SHUFFLE_ID, 1, 1);
    // 5 report unique blocks
    Roaring64NavigableMap uniqueBlockIds = Roaring64NavigableMap.bitmapOf();
    ptb.get(PARTITION_ID).stream().forEach(block -> uniqueBlockIds.add(block));
    shuffleWriteClientImpl.startSortMerge(
        Sets.newHashSet(shuffleServerInfo), testAppId, SHUFFLE_ID, PARTITION_ID, uniqueBlockIds);
    // 6 read result
    Map<Integer, List<ShuffleServerInfo>> serverInfoMap =
        ImmutableMap.of(PARTITION_ID, Lists.newArrayList(shuffleServerInfo));
    RMRecordsReader reader =
        new RMRecordsReader(testAppId, SHUFFLE_ID, Sets.newHashSet(PARTITION_ID), serverInfoMap,
            rssConf, keyClass, valueClass, comparator, raw, null, false, null, clientType);
    reader.start();
    int index = 0;
    KeyValueReader keyValueReader = reader.keyValueReader();
    // The merged stream must yield every key exactly once, in sorted order.
    while (keyValueReader.hasNext()) {
      Record record = keyValueReader.next();
      assertEquals(SerializerUtils.genData(keyClass, index), record.getKey());
      assertEquals(SerializerUtils.genData(valueClass, index), record.getValue());
      index++;
    }
    assertEquals(5 * RECORD_NUMBER, index);
    shuffleWriteClientImpl.unregisterShuffle(testAppId);
  }
  /**
   * Same single-partition merge flow as remoteMergeWriteReadTest, but with a
   * sum-by-key combiner: keys written by both tasks (index % 3 != 1) must come
   * back with their values summed (doubled).
   *
   * <p>Each parameter string is "keyClass,valueClass,clientType[,raw]".
   */
  @ParameterizedTest
  @ValueSource(
      strings = {
        "org.apache.hadoop.io.Text,org.apache.hadoop.io.IntWritable,GRPC,true",
        "org.apache.hadoop.io.Text,org.apache.hadoop.io.IntWritable,GRPC_NETTY,true",
        "org.apache.hadoop.io.Text,org.apache.hadoop.io.IntWritable,GRPC,false",
        "org.apache.hadoop.io.Text,org.apache.hadoop.io.IntWritable,GRPC_NETTY,false",
        "java.lang.String,java.lang.Integer,GRPC",
        "java.lang.String,java.lang.Integer,GRPC_NETTY",
        "org.apache.uniffle.common.serializer.SerializerUtils$SomeClass,java.lang.Integer,GRPC",
        "org.apache.uniffle.common.serializer.SerializerUtils$SomeClass,java.lang.Integer,GRPC_NETTY",
      })
  @Timeout(10)
  public void remoteMergeWriteReadTestWithCombine(String classes) throws Exception {
    // 1 basic parameter
    final String[] classArray = classes.split(",");
    final String keyClassName = classArray[0];
    final String valueClassName = classArray[1];
    final Class keyClass = SerializerUtils.getClassByName(keyClassName);
    final Class valueClass = SerializerUtils.getClassByName(valueClassName);
    final String clientType = classArray[2];
    final boolean raw = classArray.length > 3 ? Boolean.parseBoolean(classArray[3]) : false;
    final Comparator comparator = SerializerUtils.getComparator(keyClass);
    final RssConf rssConf = new RssConf();
    SerializerFactory factory = new SerializerFactory(rssConf);
    Serializer serializer = factory.getSerializer(keyClass);
    SerializerInstance serializerInstance = serializer.newInstance();
    // Combiner sums the values of identical keys during the read.
    final Combiner combiner = new SumByKeyCombiner(raw, serializerInstance, keyClass, valueClass);
    // 2 register shuffle
    createClient(clientType);
    String testAppId = "remoteMergeWriteReadTestWithCombine" + classes;
    shuffleWriteClientImpl.registerShuffle(
        shuffleServerInfo, testAppId, SHUFFLE_ID,
        Lists.newArrayList(new PartitionRange(0, 0)),
        new RemoteStorageInfo(""), ShuffleDataDistributionType.NORMAL, 0,
        RssProtos.MergeContext.newBuilder()
            .setKeyClass(keyClass.getName())
            .setValueClass(valueClass.getName())
            .setComparatorClass(comparator.getClass().getName())
            .setMergedBlockSize(-1)
            .setMergeClassLoader("")
            .build());
    // 3 report shuffle result
    // task 0 attempt 0 generate three blocks
    BlockIdLayout layout = BlockIdLayout.from(rssConf);
    List<ShuffleBlockInfo> blocks1 = new ArrayList<>();
    // Starts 0,1,2 with interval 3: task 0 alone covers every key once.
    blocks1.add(createShuffleBlockForRemoteMerge(rssConf, layout, 0, PARTITION_ID,
        Lists.newArrayList(shuffleServerInfo), keyClass, valueClass, 0, 3, RECORD_NUMBER, 1));
    blocks1.add(createShuffleBlockForRemoteMerge(rssConf, layout, 0, PARTITION_ID,
        Lists.newArrayList(shuffleServerInfo), keyClass, valueClass, 1, 3, RECORD_NUMBER, 1));
    blocks1.add(createShuffleBlockForRemoteMerge(rssConf, layout, 0, PARTITION_ID,
        Lists.newArrayList(shuffleServerInfo), keyClass, valueClass, 2, 3, RECORD_NUMBER, 1));
    shuffleWriteClientImpl.sendShuffleData(testAppId, blocks1, () -> false);
    // task 1 attempt 0 generate two blocks
    List<ShuffleBlockInfo> blocks2 = new ArrayList<>();
    // Task 1 re-writes keys with starts 0 and 2 — those keys appear twice overall.
    blocks2.add(createShuffleBlockForRemoteMerge(rssConf, layout, 1, PARTITION_ID,
        Lists.newArrayList(shuffleServerInfo), keyClass, valueClass, 0, 3, RECORD_NUMBER, 1));
    blocks2.add(createShuffleBlockForRemoteMerge(rssConf, layout, 1, PARTITION_ID,
        Lists.newArrayList(shuffleServerInfo), keyClass, valueClass, 2, 3, RECORD_NUMBER, 1));
    shuffleWriteClientImpl.sendShuffleData(testAppId, blocks2, () -> false);
    // NOTE(review): partitionToServers is never used in this test.
    Map<Integer, List<ShuffleServerInfo>> partitionToServers =
        ImmutableMap.of(PARTITION_ID, Lists.newArrayList(shuffleServerInfo));
    // 4 report shuffle result
    Map<Integer, Set<Long>> ptb = ImmutableMap.of(PARTITION_ID, new HashSet());
    ptb.get(PARTITION_ID)
        .addAll(blocks1.stream().map(s -> s.getBlockId()).collect(Collectors.toList()));
    ptb.get(PARTITION_ID)
        .addAll(blocks2.stream().map(s -> s.getBlockId()).collect(Collectors.toList()));
    Map<ShuffleServerInfo, Map<Integer, Set<Long>>> serverToPartitionToBlockIds = new HashMap();
    serverToPartitionToBlockIds.put(shuffleServerInfo, ptb);
    shuffleWriteClientImpl.reportShuffleResult(
        serverToPartitionToBlockIds, testAppId, SHUFFLE_ID, 0, 1);
    shuffleWriteClientImpl.reportShuffleResult(
        serverToPartitionToBlockIds, testAppId, SHUFFLE_ID, 1, 1);
    // 5 report unique blocks
    Roaring64NavigableMap uniqueBlockIds = Roaring64NavigableMap.bitmapOf();
    ptb.get(PARTITION_ID).stream().forEach(block -> uniqueBlockIds.add(block));
    shuffleWriteClientImpl.startSortMerge(
        Sets.newHashSet(shuffleServerInfo), testAppId, SHUFFLE_ID, PARTITION_ID, uniqueBlockIds);
    // 6 read result
    Map<Integer, List<ShuffleServerInfo>> serverInfoMap =
        ImmutableMap.of(PARTITION_ID, Lists.newArrayList(shuffleServerInfo));
    RMRecordsReader reader =
        new RMRecordsReader(testAppId, SHUFFLE_ID, Sets.newHashSet(PARTITION_ID), serverInfoMap,
            rssConf, keyClass, valueClass, comparator, raw, combiner, false, null, clientType);
    reader.start();
    int index = 0;
    KeyValueReader keyValueReader = reader.keyValueReader();
    while (keyValueReader.hasNext()) {
      Record record = keyValueReader.next();
      assertEquals(SerializerUtils.genData(keyClass, index), record.getKey());
      Object value = SerializerUtils.genData(valueClass, index);
      Object newValue = value;
      // Keys with index % 3 != 1 were written by both tasks, so the combiner doubles them.
      if (index % 3 != 1) {
        if (value instanceof IntWritable) {
          newValue = new IntWritable(((IntWritable) value).get() * 2);
        } else {
          newValue = (int) value * 2;
        }
      }
      assertEquals(newValue, record.getValue());
      index++;
    }
    assertEquals(3 * RECORD_NUMBER, index);
    shuffleWriteClientImpl.unregisterShuffle(testAppId);
  }
  /**
   * Remote-merge flow across three partitions: blocks are hashed to partitions by
   * key index mod 3, merged per partition, then read back as a single gap-free
   * sorted sequence of 6 * RECORD_NUMBER records.
   *
   * <p>Each parameter string is "keyClass,valueClass,clientType[,raw]".
   */
  @ParameterizedTest
  @ValueSource(
      strings = {
        "org.apache.hadoop.io.Text,org.apache.hadoop.io.IntWritable,GRPC,true",
        "org.apache.hadoop.io.Text,org.apache.hadoop.io.IntWritable,GRPC_NETTY,true",
        "org.apache.hadoop.io.Text,org.apache.hadoop.io.IntWritable,GRPC,false",
        "org.apache.hadoop.io.Text,org.apache.hadoop.io.IntWritable,GRPC_NETTY,false",
        "java.lang.String,java.lang.Integer,GRPC",
        "java.lang.String,java.lang.Integer,GRPC_NETTY",
        "org.apache.uniffle.common.serializer.SerializerUtils$SomeClass,java.lang.Integer,GRPC",
        "org.apache.uniffle.common.serializer.SerializerUtils$SomeClass,java.lang.Integer,GRPC_NETTY",
      })
  @Timeout(10)
  public void remoteMergeWriteReadTestMultiPartition(String classes) throws Exception {
    // 1 basic parameter
    final String[] classArray = classes.split(",");
    final String keyClassName = classArray[0];
    final String valueClassName = classArray[1];
    final Class keyClass = SerializerUtils.getClassByName(keyClassName);
    final Class valueClass = SerializerUtils.getClassByName(valueClassName);
    final String clientType = classArray[2];
    final boolean raw = classArray.length > 3 ? Boolean.parseBoolean(classArray[3]) : false;
    final Comparator comparator = SerializerUtils.getComparator(keyClass);
    final RssConf rssConf = new RssConf();
    // 2 register shuffle
    createClient(clientType);
    String testAppId = "remoteMergeWriteReadTestMultiPartition" + classes;
    shuffleWriteClientImpl.registerShuffle(
        shuffleServerInfo, testAppId, SHUFFLE_ID,
        Lists.newArrayList(
            new PartitionRange(PARTITION_ID, PARTITION_ID),
            new PartitionRange(PARTITION_ID + 1, PARTITION_ID + 1),
            new PartitionRange(PARTITION_ID + 2, PARTITION_ID + 2)),
        new RemoteStorageInfo(""), ShuffleDataDistributionType.NORMAL, 0,
        RssProtos.MergeContext.newBuilder()
            .setKeyClass(keyClass.getName())
            .setValueClass(valueClass.getName())
            .setComparatorClass(comparator.getClass().getName())
            .setMergedBlockSize(-1)
            .setMergeClassLoader("")
            .build());
    // 3 report shuffle result
    // this shuffle have three partition, which is hash by key index mode 3
    // task 0 attempt 0 generate three blocks
    BlockIdLayout layout = BlockIdLayout.from(rssConf);
    List<ShuffleBlockInfo> blocks1 = new ArrayList<>();
    // (task, partition, start, interval): start mod 3 matches the target partition.
    blocks1.add(createShuffleBlockForRemoteMerge(rssConf, layout, 0, 0,
        Lists.newArrayList(shuffleServerInfo), keyClass, valueClass, 0, 6, RECORD_NUMBER, 1));
    blocks1.add(createShuffleBlockForRemoteMerge(rssConf, layout, 0, 2,
        Lists.newArrayList(shuffleServerInfo), keyClass, valueClass, 2, 6, RECORD_NUMBER, 1));
    blocks1.add(createShuffleBlockForRemoteMerge(rssConf, layout, 0, 1,
        Lists.newArrayList(shuffleServerInfo), keyClass, valueClass, 4, 6, RECORD_NUMBER, 1));
    shuffleWriteClientImpl.sendShuffleData(testAppId, blocks1, () -> false);
    // task 1 attempt 0 generate two blocks
    List<ShuffleBlockInfo> blocks2 = new ArrayList<>();
    blocks2.add(createShuffleBlockForRemoteMerge(rssConf, layout, 1, 1,
        Lists.newArrayList(shuffleServerInfo), keyClass, valueClass, 1, 6, RECORD_NUMBER, 1));
    blocks2.add(createShuffleBlockForRemoteMerge(rssConf, layout, 1, 0,
        Lists.newArrayList(shuffleServerInfo), keyClass, valueClass, 3, 6, RECORD_NUMBER, 1));
    blocks2.add(createShuffleBlockForRemoteMerge(rssConf, layout, 1, 2,
        Lists.newArrayList(shuffleServerInfo), keyClass, valueClass, 5, 6, RECORD_NUMBER, 1));
    shuffleWriteClientImpl.sendShuffleData(testAppId, blocks2, () -> false);
    // NOTE(review): partitionToServers is never used in this test.
    Map<Integer, List<ShuffleServerInfo>> partitionToServers =
        ImmutableMap.of(
            PARTITION_ID, Lists.newArrayList(shuffleServerInfo),
            PARTITION_ID + 1, Lists.newArrayList(shuffleServerInfo),
            PARTITION_ID + 2, Lists.newArrayList(shuffleServerInfo));
    // 4 report shuffle result
    Map<Integer, Set<Long>> ptb = new HashMap<>();
    // Group the sent block ids by the partition each block was written to.
    for (int i = PARTITION_ID; i < PARTITION_ID + 3; i++) {
      final int partitionId = i;
      ptb.put(partitionId, new HashSet<>());
      ptb.get(partitionId)
          .addAll(
              blocks1.stream()
                  .filter(s -> s.getPartitionId() == partitionId)
                  .map(s -> s.getBlockId())
                  .collect(Collectors.toList()));
      ptb.get(partitionId)
          .addAll(
              blocks2.stream()
                  .filter(s -> s.getPartitionId() == partitionId)
                  .map(s -> s.getBlockId())
                  .collect(Collectors.toList()));
    }
    Map<ShuffleServerInfo, Map<Integer, Set<Long>>> serverToPartitionToBlockIds = new HashMap();
    serverToPartitionToBlockIds.put(shuffleServerInfo, ptb);
    shuffleWriteClientImpl.reportShuffleResult(
        serverToPartitionToBlockIds, testAppId, SHUFFLE_ID, 0, 1);
    shuffleWriteClientImpl.reportShuffleResult(
        serverToPartitionToBlockIds, testAppId, SHUFFLE_ID, 1, 1);
    // 5 report unique blocks
    for (int i = PARTITION_ID; i < PARTITION_ID + 3; i++) {
      Roaring64NavigableMap uniqueBlockIds = Roaring64NavigableMap.bitmapOf();
      ptb.get(i).stream().forEach(block -> uniqueBlockIds.add(block));
      shuffleWriteClientImpl.startSortMerge(
          Sets.newHashSet(shuffleServerInfo), testAppId, SHUFFLE_ID, i, uniqueBlockIds);
    }
    // 6 read result
    Map<Integer, List<ShuffleServerInfo>> serverInfoMap =
        ImmutableMap.of(
            PARTITION_ID, Lists.newArrayList(shuffleServerInfo),
            PARTITION_ID + 1, Lists.newArrayList(shuffleServerInfo),
            PARTITION_ID + 2, Lists.newArrayList(shuffleServerInfo));
    RMRecordsReader reader =
        new RMRecordsReader(testAppId, SHUFFLE_ID,
            Sets.newHashSet(PARTITION_ID, PARTITION_ID + 1, PARTITION_ID + 2), serverInfoMap,
            rssConf, keyClass, valueClass, comparator, raw, null, false, null, clientType);
    reader.start();
    int index = 0;
    KeyValueReader keyValueReader = reader.keyValueReader();
    while (keyValueReader.hasNext()) {
      Record record = keyValueReader.next();
      assertEquals(SerializerUtils.genData(keyClass, index), record.getKey());
      assertEquals(SerializerUtils.genData(valueClass, index), record.getValue());
      index++;
    }
    assertEquals(6 * RECORD_NUMBER, index);
    shuffleWriteClientImpl.unregisterShuffle(testAppId);
  }
  /**
   * Three-partition remote merge with a sum-by-key combiner: every key is written
   * twice within its block (duplicated = 2), so every value read back must be
   * doubled. Expects 6 * RECORD_NUMBER distinct keys in sorted order.
   *
   * <p>Each parameter string is "keyClass,valueClass,clientType[,raw]".
   */
  @ParameterizedTest
  @ValueSource(
      strings = {
        "org.apache.hadoop.io.Text,org.apache.hadoop.io.IntWritable,GRPC,true",
        "org.apache.hadoop.io.Text,org.apache.hadoop.io.IntWritable,GRPC_NETTY,true",
        "org.apache.hadoop.io.Text,org.apache.hadoop.io.IntWritable,GRPC,false",
        "org.apache.hadoop.io.Text,org.apache.hadoop.io.IntWritable,GRPC_NETTY,false",
        "java.lang.String,java.lang.Integer,GRPC",
        "java.lang.String,java.lang.Integer,GRPC_NETTY",
        "org.apache.uniffle.common.serializer.SerializerUtils$SomeClass,java.lang.Integer,GRPC",
        "org.apache.uniffle.common.serializer.SerializerUtils$SomeClass,java.lang.Integer,GRPC_NETTY",
      })
  @Timeout(10)
  public void remoteMergeWriteReadTestMultiPartitionWithCombine(String classes) throws Exception {
    // 1 basic parameter
    final String[] classArray = classes.split(",");
    final String keyClassName = classArray[0];
    final String valueClassName = classArray[1];
    final Class keyClass = SerializerUtils.getClassByName(keyClassName);
    final Class valueClass = SerializerUtils.getClassByName(valueClassName);
    final String clientType = classArray[2];
    final boolean raw = classArray.length > 3 ? Boolean.parseBoolean(classArray[3]) : false;
    final Comparator comparator = SerializerUtils.getComparator(keyClass);
    final RssConf rssConf = new RssConf();
    SerializerFactory factory = new SerializerFactory(rssConf);
    Serializer serializer = factory.getSerializer(keyClass);
    SerializerInstance serializerInstance = serializer.newInstance();
    // Combiner sums the duplicated values of identical keys during the read.
    final Combiner combiner = new SumByKeyCombiner(raw, serializerInstance, keyClass, valueClass);
    // 2 register shuffle
    createClient(clientType);
    String testAppId = "remoteMergeWriteReadTestMultiPartitionWithCombine" + classes;
    shuffleWriteClientImpl.registerShuffle(
        shuffleServerInfo, testAppId, SHUFFLE_ID,
        Lists.newArrayList(
            new PartitionRange(PARTITION_ID, PARTITION_ID),
            new PartitionRange(PARTITION_ID + 1, PARTITION_ID + 1),
            new PartitionRange(PARTITION_ID + 2, PARTITION_ID + 2)),
        new RemoteStorageInfo(""), ShuffleDataDistributionType.NORMAL, 0,
        RssProtos.MergeContext.newBuilder()
            .setKeyClass(keyClass.getName())
            .setValueClass(valueClass.getName())
            .setComparatorClass(comparator.getClass().getName())
            .setMergedBlockSize(-1)
            .setMergeClassLoader("")
            .build());
    // 3 report shuffle result
    // this shuffle have three partition, which is hash by key index mode 3
    // task 0 attempt 0 generate three blocks
    BlockIdLayout layout = BlockIdLayout.from(rssConf);
    List<ShuffleBlockInfo> blocks1 = new ArrayList<>();
    // (task, partition, start, interval); duplicated = 2 writes each key twice.
    blocks1.add(createShuffleBlockForRemoteMerge(rssConf, layout, 0, 0,
        Lists.newArrayList(shuffleServerInfo), keyClass, valueClass, 0, 6, RECORD_NUMBER, 2));
    blocks1.add(createShuffleBlockForRemoteMerge(rssConf, layout, 0, 2,
        Lists.newArrayList(shuffleServerInfo), keyClass, valueClass, 2, 6, RECORD_NUMBER, 2));
    blocks1.add(createShuffleBlockForRemoteMerge(rssConf, layout, 0, 1,
        Lists.newArrayList(shuffleServerInfo), keyClass, valueClass, 4, 6, RECORD_NUMBER, 2));
    shuffleWriteClientImpl.sendShuffleData(testAppId, blocks1, () -> false);
    // task 1 attempt 0 generate two blocks
    List<ShuffleBlockInfo> blocks2 = new ArrayList<>();
    blocks2.add(createShuffleBlockForRemoteMerge(rssConf, layout, 1, 1,
        Lists.newArrayList(shuffleServerInfo), keyClass, valueClass, 1, 6, RECORD_NUMBER, 2));
    blocks2.add(createShuffleBlockForRemoteMerge(rssConf, layout, 1, 0,
        Lists.newArrayList(shuffleServerInfo), keyClass, valueClass, 3, 6, RECORD_NUMBER, 2));
    blocks2.add(createShuffleBlockForRemoteMerge(rssConf, layout, 1, 2,
        Lists.newArrayList(shuffleServerInfo), keyClass, valueClass, 5, 6, RECORD_NUMBER, 2));
    shuffleWriteClientImpl.sendShuffleData(testAppId, blocks2, () -> false);
    Map<Integer, List<ShuffleServerInfo>> partitionToServers =
        ImmutableMap.of(
            PARTITION_ID, Lists.newArrayList(shuffleServerInfo),
            PARTITION_ID + 1, Lists.newArrayList(shuffleServerInfo),
            PARTITION_ID + 2, Lists.newArrayList(shuffleServerInfo));
    // 4 report shuffle result
    Map<Integer, Set<Long>> ptb = new HashMap<>();
    // Group the sent block ids by the partition each block was written to.
    for (int i = PARTITION_ID; i < PARTITION_ID + 3; i++) {
      final int partitionId = i;
      ptb.put(partitionId, new HashSet<>());
      ptb.get(partitionId)
          .addAll(
              blocks1.stream()
                  .filter(s -> s.getPartitionId() == partitionId)
                  .map(s -> s.getBlockId())
                  .collect(Collectors.toList()));
      ptb.get(partitionId)
          .addAll(
              blocks2.stream()
                  .filter(s -> s.getPartitionId() == partitionId)
                  .map(s -> s.getBlockId())
                  .collect(Collectors.toList()));
    }
    Map<ShuffleServerInfo, Map<Integer, Set<Long>>> serverToPartitionToBlockIds = new HashMap();
    serverToPartitionToBlockIds.put(shuffleServerInfo, ptb);
    shuffleWriteClientImpl.reportShuffleResult(
        serverToPartitionToBlockIds, testAppId, SHUFFLE_ID, 0, 1);
    shuffleWriteClientImpl.reportShuffleResult(
        serverToPartitionToBlockIds, testAppId, SHUFFLE_ID, 1, 1);
    // 5 report unique blocks
    for (int i = PARTITION_ID; i < PARTITION_ID + 3; i++) {
      Roaring64NavigableMap uniqueBlockIds = Roaring64NavigableMap.bitmapOf();
      ptb.get(i).stream().forEach(block -> uniqueBlockIds.add(block));
      shuffleWriteClientImpl.startSortMerge(
          new HashSet<>(partitionToServers.get(i)), testAppId, SHUFFLE_ID, i, uniqueBlockIds);
    }
    // 6 read result
    Map<Integer, List<ShuffleServerInfo>> serverInfoMap =
        ImmutableMap.of(
            PARTITION_ID, Lists.newArrayList(shuffleServerInfo),
            PARTITION_ID + 1, Lists.newArrayList(shuffleServerInfo),
            PARTITION_ID + 2, Lists.newArrayList(shuffleServerInfo));
    RMRecordsReader reader =
        new RMRecordsReader(testAppId, SHUFFLE_ID,
            Sets.newHashSet(PARTITION_ID, PARTITION_ID + 1, PARTITION_ID + 2), serverInfoMap,
            rssConf, keyClass, valueClass, comparator, raw, combiner, false, null, clientType);
    reader.start();
    int index = 0;
    KeyValueReader keyValueReader = reader.keyValueReader();
    while (keyValueReader.hasNext()) {
      Record record = keyValueReader.next();
      assertEquals(SerializerUtils.genData(keyClass, index), record.getKey());
      // Every key was duplicated, so its combined value equals genData(index * 2).
      assertEquals(SerializerUtils.genData(valueClass, index * 2), record.getValue());
      index++;
    }
    assertEquals(6 * RECORD_NUMBER, index);
    shuffleWriteClientImpl.unregisterShuffle(testAppId);
  }
  // Monotonically increasing sequence number so each generated block gets a unique id.
  private static final AtomicInteger ATOMIC_INT_SORTED = new AtomicInteger(0);

  /**
   * Builds a {@link ShuffleBlockInfo} whose payload is a buffer of generated, sorted key/value
   * records, for use in remote-merge tests.
   *
   * @param start first generated key index
   * @param interval step between consecutive generated keys
   * @param samples number of records to generate
   * @param duplicated how many duplicates of each record to emit
   */
  public static ShuffleBlockInfo createShuffleBlockForRemoteMerge(
      RssConf rssConf,
      BlockIdLayout blockIdLayout,
      int taskAttemptId,
      int partitionId,
      List<ShuffleServerInfo> shuffleServerInfoList,
      Class keyClass,
      Class valueClass,
      int start,
      int interval,
      int samples,
      int duplicated)
      throws IOException {
    // NOTE(review): the block id encodes the constant PARTITION_ID rather than the partitionId
    // parameter, while the ShuffleBlockInfo below is tagged with partitionId — confirm this
    // mismatch is intentional.
    long blockId =
        blockIdLayout.getBlockId(ATOMIC_INT_SORTED.getAndIncrement(), PARTITION_ID, taskAttemptId);
    ByteBuf byteBuf =
        SerializerUtils.genSortedRecordBuffer(
            rssConf, keyClass, valueClass, start, interval, samples, duplicated);
    ByteBuffer byteBuffer = byteBuf.nioBuffer();
    // NOTE(review): byteBuffer.array() assumes a heap-backed buffer; a direct ByteBuf would throw
    // UnsupportedOperationException here — verify genSortedRecordBuffer always allocates on-heap.
    return new ShuffleBlockInfo(
        SHUFFLE_ID,
        partitionId,
        blockId,
        byteBuf.readableBytes(),
        ChecksumUtils.getCrc32(byteBuffer),
        byteBuffer.array(),
        shuffleServerInfoList,
        byteBuf.readableBytes(),
        0,
        taskAttemptId);
  }
}
|
google/guava | 35,039 | guava/src/com/google/common/collect/TreeMultiset.java | /*
* Copyright (C) 2007 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.collect.CollectPreconditions.checkNonnegative;
import static com.google.common.collect.NullnessCasts.uncheckedCastNullableTToT;
import static java.lang.Math.max;
import static java.util.Objects.requireNonNull;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.annotations.J2ktIncompatible;
import com.google.common.base.MoreObjects;
import com.google.common.primitives.Ints;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.Comparator;
import java.util.ConcurrentModificationException;
import java.util.Iterator;
import java.util.NoSuchElementException;
import java.util.function.ObjIntConsumer;
import org.jspecify.annotations.Nullable;
/**
* A multiset which maintains the ordering of its elements, according to either their natural order
* or an explicit {@link Comparator}. In all cases, this implementation uses {@link
* Comparable#compareTo} or {@link Comparator#compare} instead of {@link Object#equals} to determine
* equivalence of instances.
*
* <p><b>Warning:</b> The comparison must be <i>consistent with equals</i> as explained by the
* {@link Comparable} class specification. Otherwise, the resulting multiset will violate the {@link
* java.util.Collection} contract, which is specified in terms of {@link Object#equals}.
*
* <p>See the Guava User Guide article on <a href=
* "https://github.com/google/guava/wiki/NewCollectionTypesExplained#multiset">{@code Multiset}</a>.
*
* @author Louis Wasserman
* @author Jared Levy
* @since 2.0
*/
@GwtCompatible
public final class TreeMultiset<E extends @Nullable Object> extends AbstractSortedMultiset<E>
implements Serializable {
/**
* Creates a new, empty multiset, sorted according to the elements' natural order. All elements
* inserted into the multiset must implement the {@code Comparable} interface. Furthermore, all
* such elements must be <i>mutually comparable</i>: {@code e1.compareTo(e2)} must not throw a
* {@code ClassCastException} for any elements {@code e1} and {@code e2} in the multiset. If the
* user attempts to add an element to the multiset that violates this constraint (for example, the
* user attempts to add a string element to a set whose elements are integers), the {@code
* add(Object)} call will throw a {@code ClassCastException}.
*
* <p>The type specification is {@code <E extends Comparable>}, instead of the more specific
* {@code <E extends Comparable<? super E>>}, to support classes defined without generics.
*/
@SuppressWarnings("rawtypes") // https://github.com/google/guava/issues/989
public static <E extends Comparable> TreeMultiset<E> create() {
return new TreeMultiset<>(Ordering.natural());
}
/**
* Creates a new, empty multiset, sorted according to the specified comparator. All elements
* inserted into the multiset must be <i>mutually comparable</i> by the specified comparator:
* {@code comparator.compare(e1, e2)} must not throw a {@code ClassCastException} for any elements
* {@code e1} and {@code e2} in the multiset. If the user attempts to add an element to the
* multiset that violates this constraint, the {@code add(Object)} call will throw a {@code
* ClassCastException}.
*
* @param comparator the comparator that will be used to sort this multiset. A null value
* indicates that the elements' <i>natural ordering</i> should be used.
*/
@SuppressWarnings("unchecked")
public static <E extends @Nullable Object> TreeMultiset<E> create(
@Nullable Comparator<? super E> comparator) {
return (comparator == null)
? new TreeMultiset<E>((Comparator) Ordering.natural())
: new TreeMultiset<E>(comparator);
}
/**
* Creates an empty multiset containing the given initial elements, sorted according to the
* elements' natural order.
*
* <p>This implementation is highly efficient when {@code elements} is itself a {@link Multiset}.
*
* <p>The type specification is {@code <E extends Comparable>}, instead of the more specific
* {@code <E extends Comparable<? super E>>}, to support classes defined without generics.
*/
@SuppressWarnings("rawtypes") // https://github.com/google/guava/issues/989
public static <E extends Comparable> TreeMultiset<E> create(Iterable<? extends E> elements) {
TreeMultiset<E> multiset = create();
Iterables.addAll(multiset, elements);
return multiset;
}
private final transient Reference<AvlNode<E>> rootReference;
private final transient GeneralRange<E> range;
private final transient AvlNode<E> header;
  /**
   * Constructor used by range views ({@code headMultiset}/{@code tailMultiset}): shares the node
   * structure ({@code rootReference} and the circular {@code header} sentinel) with the backing
   * multiset, restricted to {@code range}.
   */
  TreeMultiset(Reference<AvlNode<E>> rootReference, GeneralRange<E> range, AvlNode<E> endLink) {
    super(range.comparator());
    this.rootReference = rootReference;
    this.range = range;
    this.header = endLink;
  }

  /** Constructor for a new, empty multiset covering the full range of {@code comparator}. */
  TreeMultiset(Comparator<? super E> comparator) {
    super(comparator);
    this.range = GeneralRange.all(comparator);
    // The header is a sentinel; linking it to itself produces an empty circular element list.
    this.header = new AvlNode<>();
    successor(header, header);
    this.rootReference = new Reference<>();
  }
  /** A function which can be summed across a subtree. */
  private enum Aggregate {
    /** Total number of occurrences (contribution to {@code size()}). */
    SIZE {
      @Override
      int nodeAggregate(AvlNode<?> node) {
        return node.elemCount;
      }

      @Override
      long treeAggregate(@Nullable AvlNode<?> root) {
        // totalCount caches the sum of elemCount over the entire subtree.
        return (root == null) ? 0 : root.totalCount;
      }
    },
    /** Number of distinct elements (contribution to {@code distinctElements()}). */
    DISTINCT {
      @Override
      int nodeAggregate(AvlNode<?> node) {
        return 1;
      }

      @Override
      long treeAggregate(@Nullable AvlNode<?> root) {
        return (root == null) ? 0 : root.distinctElements;
      }
    };

    /** The contribution of a single node, ignoring its children. */
    abstract int nodeAggregate(AvlNode<?> node);

    /** The cached aggregate over an entire subtree; 0 for an empty (null) subtree. */
    abstract long treeAggregate(@Nullable AvlNode<?> root);
  }
private long aggregateForEntries(Aggregate aggr) {
AvlNode<E> root = rootReference.get();
long total = aggr.treeAggregate(root);
if (range.hasLowerBound()) {
total -= aggregateBelowRange(aggr, root);
}
if (range.hasUpperBound()) {
total -= aggregateAboveRange(aggr, root);
}
return total;
}
  /** Returns the {@code aggr} total for all entries below the range's lower bound. */
  private long aggregateBelowRange(Aggregate aggr, @Nullable AvlNode<E> node) {
    if (node == null) {
      return 0;
    }
    // The cast is safe because we call this method only if hasLowerBound().
    int cmp =
        comparator()
            .compare(uncheckedCastNullableTToT(range.getLowerEndpoint()), node.getElement());
    if (cmp < 0) {
      // The bound lies in the left subtree; only part of that subtree is below it.
      return aggregateBelowRange(aggr, node.left);
    } else if (cmp == 0) {
      switch (range.getLowerBoundType()) {
        case OPEN:
          // An open bound excludes the boundary element itself, so count this node too.
          return aggr.nodeAggregate(node) + aggr.treeAggregate(node.left);
        case CLOSED:
          return aggr.treeAggregate(node.left);
      }
      throw new AssertionError();
    } else {
      // This node and its whole left subtree are below the bound; recurse right.
      return aggr.treeAggregate(node.left)
          + aggr.nodeAggregate(node)
          + aggregateBelowRange(aggr, node.right);
    }
  }
  /** Returns the {@code aggr} total for all entries above the range's upper bound. */
  private long aggregateAboveRange(Aggregate aggr, @Nullable AvlNode<E> node) {
    if (node == null) {
      return 0;
    }
    // The cast is safe because we call this method only if hasUpperBound().
    int cmp =
        comparator()
            .compare(uncheckedCastNullableTToT(range.getUpperEndpoint()), node.getElement());
    if (cmp > 0) {
      // The bound lies in the right subtree; only part of that subtree is above it.
      return aggregateAboveRange(aggr, node.right);
    } else if (cmp == 0) {
      switch (range.getUpperBoundType()) {
        case OPEN:
          // An open bound excludes the boundary element itself, so count this node too.
          return aggr.nodeAggregate(node) + aggr.treeAggregate(node.right);
        case CLOSED:
          return aggr.treeAggregate(node.right);
      }
      throw new AssertionError();
    } else {
      // This node and its whole right subtree are above the bound; recurse left.
      return aggr.treeAggregate(node.right)
          + aggr.nodeAggregate(node)
          + aggregateAboveRange(aggr, node.left);
    }
  }
@Override
public int size() {
return Ints.saturatedCast(aggregateForEntries(Aggregate.SIZE));
}
  @Override
  int distinctElements() {
    // Saturate rather than overflow if the distinct count exceeds Integer.MAX_VALUE.
    return Ints.saturatedCast(aggregateForEntries(Aggregate.DISTINCT));
  }

  /** Null-safe accessor for a subtree's cached distinct-element count. */
  static int distinctElements(@Nullable AvlNode<?> node) {
    return (node == null) ? 0 : node.distinctElements;
  }
  @Override
  public int count(@Nullable Object element) {
    try {
      @SuppressWarnings("unchecked")
      E e = (E) element;
      AvlNode<E> root = rootReference.get();
      if (!range.contains(e) || root == null) {
        return 0;
      }
      return root.count(comparator(), e);
    } catch (ClassCastException | NullPointerException e) {
      // An element the comparator rejects cannot be in the multiset: report 0 occurrences.
      return 0;
    }
  }
  @CanIgnoreReturnValue
  @Override
  public int add(@ParametricNullness E element, int occurrences) {
    checkNonnegative(occurrences, "occurrences");
    if (occurrences == 0) {
      // Per the Multiset contract, add(e, 0) is a no-op that returns the current count.
      return count(element);
    }
    checkArgument(range.contains(element));
    AvlNode<E> root = rootReference.get();
    if (root == null) {
      // Comparing the element to itself forces the comparator to reject unsuitable elements
      // (e.g. null under natural ordering) even when the tree is empty.
      int unused = comparator().compare(element, element);
      AvlNode<E> newRoot = new AvlNode<>(element, occurrences);
      successor(header, newRoot, header);
      rootReference.checkAndSet(root, newRoot);
      return 0;
    }
    int[] result = new int[1]; // used as a mutable int reference to hold result
    AvlNode<E> newRoot = root.add(comparator(), element, occurrences, result);
    // checkAndSet fails with ConcurrentModificationException if another writer raced us.
    rootReference.checkAndSet(root, newRoot);
    return result[0];
  }
  @CanIgnoreReturnValue
  @Override
  public int remove(@Nullable Object element, int occurrences) {
    checkNonnegative(occurrences, "occurrences");
    if (occurrences == 0) {
      // Per the Multiset contract, remove(e, 0) is a no-op that returns the current count.
      return count(element);
    }
    AvlNode<E> root = rootReference.get();
    int[] result = new int[1]; // used as a mutable int reference to hold result
    AvlNode<E> newRoot;
    try {
      @SuppressWarnings("unchecked")
      E e = (E) element;
      if (!range.contains(e) || root == null) {
        return 0;
      }
      newRoot = root.remove(comparator(), e, occurrences, result);
    } catch (ClassCastException | NullPointerException e) {
      // An element the comparator rejects cannot be in the multiset: nothing to remove.
      return 0;
    }
    rootReference.checkAndSet(root, newRoot);
    // result[0] is the count the element had before removal.
    return result[0];
  }
  @CanIgnoreReturnValue
  @Override
  public int setCount(@ParametricNullness E element, int count) {
    checkNonnegative(count, "count");
    if (!range.contains(element)) {
      // Setting an out-of-range element's count to 0 is a harmless no-op; anything else is illegal.
      checkArgument(count == 0);
      return 0;
    }
    AvlNode<E> root = rootReference.get();
    if (root == null) {
      if (count > 0) {
        add(element, count);
      }
      return 0;
    }
    int[] result = new int[1]; // used as a mutable int reference to hold result
    AvlNode<E> newRoot = root.setCount(comparator(), element, count, result);
    rootReference.checkAndSet(root, newRoot);
    // result[0] is the count the element had before this call.
    return result[0];
  }
  @CanIgnoreReturnValue
  @Override
  public boolean setCount(@ParametricNullness E element, int oldCount, int newCount) {
    checkNonnegative(newCount, "newCount");
    checkNonnegative(oldCount, "oldCount");
    checkArgument(range.contains(element));
    AvlNode<E> root = rootReference.get();
    if (root == null) {
      // Empty tree: the conditional update succeeds only if the expected count was 0.
      if (oldCount == 0) {
        if (newCount > 0) {
          add(element, newCount);
        }
        return true;
      } else {
        return false;
      }
    }
    int[] result = new int[1]; // used as a mutable int reference to hold result
    AvlNode<E> newRoot = root.setCount(comparator(), element, oldCount, newCount, result);
    rootReference.checkAndSet(root, newRoot);
    // The update happened iff the element's prior count matched oldCount.
    return result[0] == oldCount;
  }
  @Override
  public void clear() {
    if (!range.hasLowerBound() && !range.hasUpperBound()) {
      // We can do this in O(n) rather than removing one by one, which could force rebalancing.
      for (AvlNode<E> current = header.succ(); current != header; ) {
        AvlNode<E> next = current.succ();
        // elemCount == 0 marks the node as deleted for any iterators still holding it.
        current.elemCount = 0;
        // Also clear these fields so that one deleted Entry doesn't retain all elements.
        current.left = null;
        current.right = null;
        current.pred = null;
        current.succ = null;
        current = next;
      }
      // Relink the sentinel to itself (empty list) and drop the root.
      successor(header, header);
      rootReference.clear();
    } else {
      // TODO(cpovirk): Perhaps we can optimize in this case, too?
      Iterators.clear(entryIterator());
    }
  }
  /** Wraps a tree node as a live Multiset.Entry view (its count tracks later modifications). */
  private Entry<E> wrapEntry(AvlNode<E> baseEntry) {
    return new Multisets.AbstractEntry<E>() {
      @Override
      @ParametricNullness
      public E getElement() {
        return baseEntry.getElement();
      }

      @Override
      public int getCount() {
        int result = baseEntry.getCount();
        if (result == 0) {
          // A zero node count means the node was deleted; fall back to a fresh lookup.
          return count(getElement());
        } else {
          return result;
        }
      }
    };
  }
  /** Returns the first node in the tree that is in range. */
  private @Nullable AvlNode<E> firstNode() {
    AvlNode<E> root = rootReference.get();
    if (root == null) {
      return null;
    }
    AvlNode<E> node;
    if (range.hasLowerBound()) {
      // The cast is safe because of the hasLowerBound check.
      E endpoint = uncheckedCastNullableTToT(range.getLowerEndpoint());
      node = root.ceiling(comparator(), endpoint);
      if (node == null) {
        return null;
      }
      // An open lower bound excludes an exact match; step to its in-order successor.
      if (range.getLowerBoundType() == BoundType.OPEN
          && comparator().compare(endpoint, node.getElement()) == 0) {
        node = node.succ();
      }
    } else {
      // No lower bound: the first node is the sentinel's successor.
      node = header.succ();
    }
    return (node == header || !range.contains(node.getElement())) ? null : node;
  }
  /** Returns the last node in the tree that is in range. */
  private @Nullable AvlNode<E> lastNode() {
    AvlNode<E> root = rootReference.get();
    if (root == null) {
      return null;
    }
    AvlNode<E> node;
    if (range.hasUpperBound()) {
      // The cast is safe because of the hasUpperBound check.
      E endpoint = uncheckedCastNullableTToT(range.getUpperEndpoint());
      node = root.floor(comparator(), endpoint);
      if (node == null) {
        return null;
      }
      // An open upper bound excludes an exact match; step to its in-order predecessor.
      if (range.getUpperBoundType() == BoundType.OPEN
          && comparator().compare(endpoint, node.getElement()) == 0) {
        node = node.pred();
      }
    } else {
      // No upper bound: the last node is the sentinel's predecessor.
      node = header.pred();
    }
    return (node == header || !range.contains(node.getElement())) ? null : node;
  }
  @Override
  Iterator<E> elementIterator() {
    // Delegate to the entry iterator, yielding each distinct element once.
    return Multisets.elementIterator(entryIterator());
  }
  @Override
  Iterator<Entry<E>> entryIterator() {
    // Walks the threaded pred/succ links in ascending order, starting at the first in-range node.
    return new Iterator<Entry<E>>() {
      @Nullable AvlNode<E> current = firstNode();
      // Entry returned by the last next() call; needed by remove().
      @Nullable Entry<E> prevEntry;

      @Override
      public boolean hasNext() {
        if (current == null) {
          return false;
        } else if (range.tooHigh(current.getElement())) {
          // Walked past the upper bound: terminate iteration.
          current = null;
          return false;
        } else {
          return true;
        }
      }

      @Override
      public Entry<E> next() {
        if (!hasNext()) {
          throw new NoSuchElementException();
        }
        // requireNonNull is safe because current is only nulled out after iteration is complete.
        Entry<E> result = wrapEntry(requireNonNull(current));
        prevEntry = result;
        if (current.succ() == header) {
          // Reached the sentinel: no more elements.
          current = null;
        } else {
          current = current.succ();
        }
        return result;
      }

      @Override
      public void remove() {
        checkState(prevEntry != null, "no calls to next() since the last call to remove()");
        // Removing an entry means dropping all occurrences of its element.
        setCount(prevEntry.getElement(), 0);
        prevEntry = null;
      }
    };
  }
  @Override
  Iterator<Entry<E>> descendingEntryIterator() {
    // Mirror of entryIterator(): walks pred links in descending order from the last in-range node.
    return new Iterator<Entry<E>>() {
      @Nullable AvlNode<E> current = lastNode();
      // Entry returned by the last next() call; needed by remove().
      @Nullable Entry<E> prevEntry = null;

      @Override
      public boolean hasNext() {
        if (current == null) {
          return false;
        } else if (range.tooLow(current.getElement())) {
          // Walked past the lower bound: terminate iteration.
          current = null;
          return false;
        } else {
          return true;
        }
      }

      @Override
      public Entry<E> next() {
        if (!hasNext()) {
          throw new NoSuchElementException();
        }
        // requireNonNull is safe because current is only nulled out after iteration is complete.
        requireNonNull(current);
        Entry<E> result = wrapEntry(current);
        prevEntry = result;
        if (current.pred() == header) {
          // Reached the sentinel: no more elements.
          current = null;
        } else {
          current = current.pred();
        }
        return result;
      }

      @Override
      public void remove() {
        checkState(prevEntry != null, "no calls to next() since the last call to remove()");
        // Removing an entry means dropping all occurrences of its element.
        setCount(prevEntry.getElement(), 0);
        prevEntry = null;
      }
    };
  }
  @Override
  public void forEachEntry(ObjIntConsumer<? super E> action) {
    checkNotNull(action);
    // Walk the threaded succ links directly, stopping at the sentinel or the upper bound.
    for (AvlNode<E> node = firstNode();
        node != header && node != null && !range.tooHigh(node.getElement());
        node = node.succ()) {
      action.accept(node.getElement(), node.getCount());
    }
  }
  @Override
  public Iterator<E> iterator() {
    // Standard multiset iteration: each element repeated count() times.
    return Multisets.iteratorImpl(this);
  }
  @Override
  public SortedMultiset<E> headMultiset(@ParametricNullness E upperBound, BoundType boundType) {
    // Views share the same tree; only the range is narrowed.
    return new TreeMultiset<>(
        rootReference,
        range.intersect(GeneralRange.upTo(comparator(), upperBound, boundType)),
        header);
  }

  @Override
  public SortedMultiset<E> tailMultiset(@ParametricNullness E lowerBound, BoundType boundType) {
    // Views share the same tree; only the range is narrowed.
    return new TreeMultiset<>(
        rootReference,
        range.intersect(GeneralRange.downTo(comparator(), lowerBound, boundType)),
        header);
  }
  /**
   * Mutable holder for the tree root. Also provides a cheap concurrent-modification check:
   * writers pass the root they originally read, and the write fails if another writer got there
   * first.
   */
  private static final class Reference<T> {
    private @Nullable T value;

    @Nullable T get() {
      return value;
    }

    /** Sets the value, failing if it changed since {@code expected} was read. */
    void checkAndSet(@Nullable T expected, @Nullable T newValue) {
      if (value != expected) {
        throw new ConcurrentModificationException();
      }
      value = newValue;
    }

    void clear() {
      value = null;
    }
  }
  /** A node of the AVL tree, also threaded into a circular doubly-linked list via pred/succ. */
  private static final class AvlNode<E extends @Nullable Object> {
    /*
     * For "normal" nodes, the type of this field is `E`, not `@Nullable E` (though note that E is a
     * type that can include null, as in a TreeMultiset<@Nullable String>).
     *
     * For the header node, though, this field contains `null`, regardless of the type of the
     * multiset.
     *
     * Most code that operates on an AvlNode never operates on the header node. Such code can access
     * the elem field without a null check by calling getElement().
     */
    private final @Nullable E elem;

    // elemCount is 0 iff this node has been deleted.
    private int elemCount;

    // Cached number of distinct elements in the subtree rooted here (including this node).
    private int distinctElements;
    // Cached total occurrences in the subtree rooted here; long because counts can sum past int.
    private long totalCount;
    // Height of the subtree rooted here (a leaf has height 1); drives AVL rebalancing.
    private int height;
    private @Nullable AvlNode<E> left;
    private @Nullable AvlNode<E> right;

    /*
     * pred and succ are nullable after construction, but we always call successor() to initialize
     * them immediately thereafter.
     *
     * They may be subsequently nulled out by TreeMultiset.clear(). I think that the only place that
     * we can reference a node whose fields have been cleared is inside the iterator (and presumably
     * only under concurrent modification).
     *
     * To access these fields when you know that they are not null, call the pred() and succ()
     * methods, which perform null checks before returning the fields.
     */
    private @Nullable AvlNode<E> pred;
    private @Nullable AvlNode<E> succ;
    /** Constructs a leaf node holding {@code elemCount} occurrences of {@code elem}. */
    AvlNode(@ParametricNullness E elem, int elemCount) {
      checkArgument(elemCount > 0);
      this.elem = elem;
      this.elemCount = elemCount;
      this.totalCount = elemCount;
      this.distinctElements = 1;
      this.height = 1;
      this.left = null;
      this.right = null;
    }

    /** Constructor for the header node. */
    AvlNode() {
      this.elem = null;
      // The sentinel is never "deleted", so give it a nonzero count.
      this.elemCount = 1;
    }

    // For discussion of pred() and succ(), see the comment on the pred and succ fields.
    private AvlNode<E> pred() {
      return requireNonNull(pred);
    }

    private AvlNode<E> succ() {
      return requireNonNull(succ);
    }
    /** Returns the number of occurrences of {@code e} in this subtree (0 if absent). */
    int count(Comparator<? super E> comparator, @ParametricNullness E e) {
      int cmp = comparator.compare(e, getElement());
      if (cmp < 0) {
        return (left == null) ? 0 : left.count(comparator, e);
      } else if (cmp > 0) {
        return (right == null) ? 0 : right.count(comparator, e);
      } else {
        return elemCount;
      }
    }
    /** Attaches a new right child holding {@code count} occurrences of {@code e}; returns this. */
    @CanIgnoreReturnValue
    private AvlNode<E> addRightChild(@ParametricNullness E e, int count) {
      right = new AvlNode<>(e, count);
      // Thread the new node into the linked list between this node and its old successor.
      successor(this, right, succ());
      height = max(2, height);
      distinctElements++;
      totalCount += count;
      return this;
    }

    /** Attaches a new left child holding {@code count} occurrences of {@code e}; returns this. */
    @CanIgnoreReturnValue
    private AvlNode<E> addLeftChild(@ParametricNullness E e, int count) {
      left = new AvlNode<>(e, count);
      // Thread the new node into the linked list between this node's old predecessor and itself.
      successor(pred(), left, this);
      height = max(2, height);
      distinctElements++;
      totalCount += count;
      return this;
    }
    /**
     * Adds {@code count} occurrences of {@code e} to this subtree, writing the element's prior
     * count into {@code result[0]} and returning the (possibly rebalanced) new subtree root.
     */
    AvlNode<E> add(
        Comparator<? super E> comparator, @ParametricNullness E e, int count, int[] result) {
      /*
       * It speeds things up considerably to unconditionally add count to totalCount here,
       * but that destroys failure atomicity in the case of count overflow. =(
       */
      int cmp = comparator.compare(e, getElement());
      if (cmp < 0) {
        AvlNode<E> initLeft = left;
        if (initLeft == null) {
          result[0] = 0;
          return addLeftChild(e, count);
        }
        int initHeight = initLeft.height;
        left = initLeft.add(comparator, e, count, result);
        if (result[0] == 0) {
          // A prior count of 0 means the element is new to this subtree.
          distinctElements++;
        }
        this.totalCount += count;
        // Only rebalance if the child's height actually changed.
        return (left.height == initHeight) ? this : rebalance();
      } else if (cmp > 0) {
        AvlNode<E> initRight = right;
        if (initRight == null) {
          result[0] = 0;
          return addRightChild(e, count);
        }
        int initHeight = initRight.height;
        right = initRight.add(comparator, e, count, result);
        if (result[0] == 0) {
          // A prior count of 0 means the element is new to this subtree.
          distinctElements++;
        }
        this.totalCount += count;
        // Only rebalance if the child's height actually changed.
        return (right.height == initHeight) ? this : rebalance();
      }
      // adding count to me! No rebalance possible.
      result[0] = elemCount;
      long resultCount = (long) elemCount + count;
      // Fail before mutating if the new count would overflow int.
      checkArgument(resultCount <= Integer.MAX_VALUE);
      this.elemCount += count;
      this.totalCount += count;
      return this;
    }
    /**
     * Removes up to {@code count} occurrences of {@code e} from this subtree, writing the
     * element's prior count into {@code result[0]} and returning the new subtree root (null if
     * the subtree becomes empty).
     */
    @Nullable AvlNode<E> remove(
        Comparator<? super E> comparator, @ParametricNullness E e, int count, int[] result) {
      int cmp = comparator.compare(e, getElement());
      if (cmp < 0) {
        AvlNode<E> initLeft = left;
        if (initLeft == null) {
          // Element not present; nothing to do.
          result[0] = 0;
          return this;
        }
        left = initLeft.remove(comparator, e, count, result);
        if (result[0] > 0) {
          if (count >= result[0]) {
            // The element was removed entirely.
            this.distinctElements--;
            this.totalCount -= result[0];
          } else {
            this.totalCount -= count;
          }
        }
        return (result[0] == 0) ? this : rebalance();
      } else if (cmp > 0) {
        AvlNode<E> initRight = right;
        if (initRight == null) {
          // Element not present; nothing to do.
          result[0] = 0;
          return this;
        }
        right = initRight.remove(comparator, e, count, result);
        if (result[0] > 0) {
          if (count >= result[0]) {
            // The element was removed entirely.
            this.distinctElements--;
            this.totalCount -= result[0];
          } else {
            this.totalCount -= count;
          }
        }
        return rebalance();
      }
      // removing count from me!
      result[0] = elemCount;
      if (count >= elemCount) {
        // All occurrences removed: unlink this node from the tree.
        return deleteMe();
      } else {
        this.elemCount -= count;
        this.totalCount -= count;
        return this;
      }
    }
    /**
     * Sets the count of {@code e} in this subtree to {@code count}, writing the prior count into
     * {@code result[0]} and returning the new subtree root (null if the subtree becomes empty).
     */
    @Nullable AvlNode<E> setCount(
        Comparator<? super E> comparator, @ParametricNullness E e, int count, int[] result) {
      int cmp = comparator.compare(e, getElement());
      if (cmp < 0) {
        AvlNode<E> initLeft = left;
        if (initLeft == null) {
          result[0] = 0;
          // Absent element: only create a node if the new count is positive.
          return (count > 0) ? addLeftChild(e, count) : this;
        }
        left = initLeft.setCount(comparator, e, count, result);
        // Adjust the distinct count when the element transitions between absent and present.
        if (count == 0 && result[0] != 0) {
          this.distinctElements--;
        } else if (count > 0 && result[0] == 0) {
          this.distinctElements++;
        }
        this.totalCount += count - result[0];
        return rebalance();
      } else if (cmp > 0) {
        AvlNode<E> initRight = right;
        if (initRight == null) {
          result[0] = 0;
          // Absent element: only create a node if the new count is positive.
          return (count > 0) ? addRightChild(e, count) : this;
        }
        right = initRight.setCount(comparator, e, count, result);
        // Adjust the distinct count when the element transitions between absent and present.
        if (count == 0 && result[0] != 0) {
          this.distinctElements--;
        } else if (count > 0 && result[0] == 0) {
          this.distinctElements++;
        }
        this.totalCount += count - result[0];
        return rebalance();
      }
      // setting my count
      result[0] = elemCount;
      if (count == 0) {
        return deleteMe();
      }
      this.totalCount += count - elemCount;
      this.elemCount = count;
      return this;
    }
    /**
     * Conditionally sets the count of {@code e} to {@code newCount} iff its current count equals
     * {@code expectedCount}; writes the prior count into {@code result[0]} and returns the new
     * subtree root (null if the subtree becomes empty).
     */
    @Nullable AvlNode<E> setCount(
        Comparator<? super E> comparator,
        @ParametricNullness E e,
        int expectedCount,
        int newCount,
        int[] result) {
      int cmp = comparator.compare(e, getElement());
      if (cmp < 0) {
        AvlNode<E> initLeft = left;
        if (initLeft == null) {
          result[0] = 0;
          // An absent element matches only expectedCount == 0.
          if (expectedCount == 0 && newCount > 0) {
            return addLeftChild(e, newCount);
          }
          return this;
        }
        left = initLeft.setCount(comparator, e, expectedCount, newCount, result);
        // Only update aggregates if the conditional write actually happened.
        if (result[0] == expectedCount) {
          if (newCount == 0 && result[0] != 0) {
            this.distinctElements--;
          } else if (newCount > 0 && result[0] == 0) {
            this.distinctElements++;
          }
          this.totalCount += newCount - result[0];
        }
        return rebalance();
      } else if (cmp > 0) {
        AvlNode<E> initRight = right;
        if (initRight == null) {
          result[0] = 0;
          // An absent element matches only expectedCount == 0.
          if (expectedCount == 0 && newCount > 0) {
            return addRightChild(e, newCount);
          }
          return this;
        }
        right = initRight.setCount(comparator, e, expectedCount, newCount, result);
        // Only update aggregates if the conditional write actually happened.
        if (result[0] == expectedCount) {
          if (newCount == 0 && result[0] != 0) {
            this.distinctElements--;
          } else if (newCount > 0 && result[0] == 0) {
            this.distinctElements++;
          }
          this.totalCount += newCount - result[0];
        }
        return rebalance();
      }
      // setting my count
      result[0] = elemCount;
      if (expectedCount == elemCount) {
        if (newCount == 0) {
          return deleteMe();
        }
        this.totalCount += newCount - elemCount;
        this.elemCount = newCount;
      }
      return this;
    }
    /**
     * Unlinks this node from the tree and the linked list, returning the replacement subtree root
     * (null if this was the only node). A neighboring list node is promoted to take this node's
     * place in the tree when both children exist.
     */
    private @Nullable AvlNode<E> deleteMe() {
      int oldElemCount = this.elemCount;
      // Mark deleted (see the comment on elemCount) and splice out of the linked list.
      this.elemCount = 0;
      successor(pred(), succ());
      if (left == null) {
        return right;
      } else if (right == null) {
        return left;
      } else if (left.height >= right.height) {
        AvlNode<E> newTop = pred();
        // newTop is the maximum node in my left subtree
        newTop.left = left.removeMax(newTop);
        newTop.right = right;
        newTop.distinctElements = distinctElements - 1;
        newTop.totalCount = totalCount - oldElemCount;
        return newTop.rebalance();
      } else {
        // Symmetric case: promote my in-order successor, the minimum of the right subtree.
        AvlNode<E> newTop = succ();
        newTop.right = right.removeMin(newTop);
        newTop.left = left;
        newTop.distinctElements = distinctElements - 1;
        newTop.totalCount = totalCount - oldElemCount;
        return newTop.rebalance();
      }
    }
    // Removes the minimum node from this subtree to be reused elsewhere
    // (node is that minimum; its aggregates are subtracted on the way down).
    private @Nullable AvlNode<E> removeMin(AvlNode<E> node) {
      if (left == null) {
        // This IS the minimum: detach it by returning its right child.
        return right;
      } else {
        left = left.removeMin(node);
        distinctElements--;
        totalCount -= node.elemCount;
        return rebalance();
      }
    }

    // Removes the maximum node from this subtree to be reused elsewhere
    // (node is that maximum; its aggregates are subtracted on the way down).
    private @Nullable AvlNode<E> removeMax(AvlNode<E> node) {
      if (right == null) {
        // This IS the maximum: detach it by returning its left child.
        return left;
      } else {
        right = right.removeMax(node);
        distinctElements--;
        totalCount -= node.elemCount;
        return rebalance();
      }
    }
    /** Recomputes the cached multiset aggregates from this node and its children. */
    private void recomputeMultiset() {
      this.distinctElements =
          1 + TreeMultiset.distinctElements(left) + TreeMultiset.distinctElements(right);
      this.totalCount = elemCount + totalCount(left) + totalCount(right);
    }

    /** Recomputes the cached AVL height from the children's heights. */
    private void recomputeHeight() {
      this.height = 1 + max(height(left), height(right));
    }

    /** Recomputes all cached values (aggregates and height). */
    private void recompute() {
      recomputeMultiset();
      recomputeHeight();
    }
    /** Restores the AVL invariant at this node via single or double rotations; returns the new root. */
    private AvlNode<E> rebalance() {
      switch (balanceFactor()) {
        case -2:
          // Right-heavy. requireNonNull is safe because right must exist in order to get a
          // negative factor.
          requireNonNull(right);
          if (right.balanceFactor() > 0) {
            // Right-left case: pre-rotate the child to reduce to right-right.
            right = right.rotateRight();
          }
          return rotateLeft();
        case 2:
          // Left-heavy. requireNonNull is safe because left must exist in order to get a positive
          // factor.
          requireNonNull(left);
          if (left.balanceFactor() < 0) {
            // Left-right case: pre-rotate the child to reduce to left-left.
            left = left.rotateLeft();
          }
          return rotateRight();
        default:
          recomputeHeight();
          return this;
      }
    }

    /** Height difference (left minus right); the AVL invariant keeps this in [-1, 1]. */
    private int balanceFactor() {
      return height(left) - height(right);
    }
    /** Standard AVL left rotation; the right child becomes the new subtree root. */
    private AvlNode<E> rotateLeft() {
      checkState(right != null);
      AvlNode<E> newTop = right;
      this.right = newTop.left;
      newTop.left = this;
      // The subtree as a whole keeps the same aggregates; copy them to the new root before
      // recomputing this node's values from its new (smaller) subtree.
      newTop.totalCount = this.totalCount;
      newTop.distinctElements = this.distinctElements;
      this.recompute();
      newTop.recomputeHeight();
      return newTop;
    }

    /** Standard AVL right rotation; the left child becomes the new subtree root. */
    private AvlNode<E> rotateRight() {
      checkState(left != null);
      AvlNode<E> newTop = left;
      this.left = newTop.right;
      newTop.right = this;
      // The subtree as a whole keeps the same aggregates; copy them to the new root before
      // recomputing this node's values from its new (smaller) subtree.
      newTop.totalCount = this.totalCount;
      newTop.distinctElements = this.distinctElements;
      this.recompute();
      newTop.recomputeHeight();
      return newTop;
    }
    /** Null-safe accessor for a subtree's cached total occurrence count. */
    private static long totalCount(@Nullable AvlNode<?> node) {
      return (node == null) ? 0 : node.totalCount;
    }

    /** Null-safe accessor for a subtree's cached height (0 for an empty subtree). */
    private static int height(@Nullable AvlNode<?> node) {
      return (node == null) ? 0 : node.height;
    }
    /** Returns the smallest node in this subtree whose element is >= {@code e}, or null. */
    private @Nullable AvlNode<E> ceiling(
        Comparator<? super E> comparator, @ParametricNullness E e) {
      int cmp = comparator.compare(e, getElement());
      if (cmp < 0) {
        // This node qualifies, but a smaller qualifying node may exist in the left subtree.
        return (left == null) ? this : MoreObjects.firstNonNull(left.ceiling(comparator, e), this);
      } else if (cmp == 0) {
        return this;
      } else {
        return (right == null) ? null : right.ceiling(comparator, e);
      }
    }

    /** Returns the largest node in this subtree whose element is <= {@code e}, or null. */
    private @Nullable AvlNode<E> floor(Comparator<? super E> comparator, @ParametricNullness E e) {
      int cmp = comparator.compare(e, getElement());
      if (cmp > 0) {
        // This node qualifies, but a larger qualifying node may exist in the right subtree.
        return (right == null) ? this : MoreObjects.firstNonNull(right.floor(comparator, e), this);
      } else if (cmp == 0) {
        return this;
      } else {
        return (left == null) ? null : left.floor(comparator, e);
      }
    }
    @ParametricNullness
    E getElement() {
      // For discussion of this cast, see the comment on the elem field.
      return uncheckedCastNullableTToT(elem);
    }

    /** Occurrence count at this node; 0 indicates a deleted node. */
    int getCount() {
      return elemCount;
    }

    @Override
    public String toString() {
      return Multisets.immutableEntry(getElement(), getCount()).toString();
    }
  }
  /** Links {@code a} directly before {@code b} in the circular doubly-linked element list. */
  private static <T extends @Nullable Object> void successor(AvlNode<T> a, AvlNode<T> b) {
    a.succ = b;
    b.pred = a;
  }

  /** Links {@code a} → {@code b} → {@code c} in the circular doubly-linked element list. */
  private static <T extends @Nullable Object> void successor(
      AvlNode<T> a, AvlNode<T> b, AvlNode<T> c) {
    successor(a, b);
    successor(b, c);
  }
/*
* TODO(jlevy): Decide whether entrySet() should return entries with an equals() method that
* calls the comparator to compare the two keys. If that change is made,
* AbstractMultiset.equals() can simply check whether two multisets have equal entry sets.
*/
  /**
   * @serialData the comparator, the number of distinct elements, the first element, its count, the
   *     second element, its count, and so on
   */
  @GwtIncompatible
  @J2ktIncompatible
  private void writeObject(ObjectOutputStream stream) throws IOException {
    stream.defaultWriteObject();
    // All interesting fields are transient, so write the comparator and contents explicitly.
    stream.writeObject(elementSet().comparator());
    Serialization.writeMultiset(this, stream);
  }
  @J2ktIncompatible
  @GwtIncompatible
  private void readObject(ObjectInputStream stream) throws IOException, ClassNotFoundException {
    stream.defaultReadObject();
    @SuppressWarnings("unchecked")
    // reading data stored by writeObject
    Comparator<? super E> comparator = (Comparator<? super E>) requireNonNull(stream.readObject());
    // The fields are final/transient, so they must be set reflectively before repopulating.
    Serialization.getFieldSetter(AbstractSortedMultiset.class, "comparator").set(this, comparator);
    Serialization.getFieldSetter(TreeMultiset.class, "range")
        .set(this, GeneralRange.all(comparator));
    Serialization.getFieldSetter(TreeMultiset.class, "rootReference")
        .set(this, new Reference<AvlNode<E>>());
    AvlNode<E> header = new AvlNode<>();
    Serialization.getFieldSetter(TreeMultiset.class, "header").set(this, header);
    successor(header, header);
    Serialization.populateMultiset(this, stream);
  }
@GwtIncompatible @J2ktIncompatible private static final long serialVersionUID = 1;
}
|
googlearchive/science-journal | 35,043 | OpenScienceJournal/whistlepunk_library/src/main/java/com/google/android/apps/forscience/whistlepunk/DataControllerImpl.java | /*
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.apps.forscience.whistlepunk;
import android.content.ContentResolver;
import android.content.Context;
import android.net.Uri;
import android.util.Log;
import com.google.android.apps.forscience.javalib.Consumer;
import com.google.android.apps.forscience.javalib.FailureListener;
import com.google.android.apps.forscience.javalib.MaybeConsumer;
import com.google.android.apps.forscience.javalib.MaybeConsumers;
import com.google.android.apps.forscience.javalib.Success;
import com.google.android.apps.forscience.whistlepunk.accounts.AppAccount;
import com.google.android.apps.forscience.whistlepunk.api.scalarinput.InputDeviceSpec;
import com.google.android.apps.forscience.whistlepunk.devicemanager.ConnectableSensor;
import com.google.android.apps.forscience.whistlepunk.filemetadata.Experiment;
import com.google.android.apps.forscience.whistlepunk.filemetadata.ExperimentOverviewPojo;
import com.google.android.apps.forscience.whistlepunk.filemetadata.FileMetadataUtil;
import com.google.android.apps.forscience.whistlepunk.filemetadata.FileSyncCollection;
import com.google.android.apps.forscience.whistlepunk.filemetadata.SensorLayoutPojo;
import com.google.android.apps.forscience.whistlepunk.filemetadata.Trial;
import com.google.android.apps.forscience.whistlepunk.metadata.ExperimentSensors;
import com.google.android.apps.forscience.whistlepunk.metadata.ExternalSensorSpec;
import com.google.android.apps.forscience.whistlepunk.metadata.GoosciExperiment;
import com.google.android.apps.forscience.whistlepunk.metadata.GoosciScalarSensorData;
import com.google.android.apps.forscience.whistlepunk.metadata.GoosciScalarSensorData.ScalarSensorDataDump;
import com.google.android.apps.forscience.whistlepunk.metadata.MetaDataManager;
import com.google.android.apps.forscience.whistlepunk.sensorapi.ScalarSensorDumpReader;
import com.google.android.apps.forscience.whistlepunk.sensordb.ScalarReading;
import com.google.android.apps.forscience.whistlepunk.sensordb.ScalarReadingList;
import com.google.android.apps.forscience.whistlepunk.sensordb.SensorDatabase;
import com.google.android.apps.forscience.whistlepunk.sensordb.TimeRange;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.google.common.collect.Range;
import io.reactivex.Observable;
import io.reactivex.schedulers.Schedulers;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.lang.ref.WeakReference;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.Executor;
/**
 * Default implementation of {@link DataController} and {@link RecordingDataController}.
 *
 * <p>All metadata and sensor-database work is dispatched to background executors
 * ({@code metaDataThread} / {@code sensorDataThread}); results and failures are delivered back to
 * callers on {@code uiThread} through {@link MaybeConsumer} callbacks (see {@link #background}).
 * Loaded {@link Experiment} instances are weakly cached per experiment id so all callers share a
 * single in-memory object for a given experiment.
 */
public class DataControllerImpl implements DataController, RecordingDataController {
  private static final String TAG = "DataControllerImpl";
  private final Context context;
  private final AppAccount appAccount;
  private final SensorDatabase sensorDatabase;
  private final Executor uiThread;
  private final Executor metaDataThread;
  private final Executor sensorDataThread;
  private MetaDataManager metaDataManager;
  private Clock clock;
  // Listeners notified (on the UI thread) when persisting a reading for a sensor id fails.
  private Map<String, FailureListener> sensorFailureListeners = new HashMap<>();
  private final Map<String, SensorProvider> providerMap;
  // Timestamp of the last generated label id; bumped in generateNewLabelId() to keep ids unique.
  private long prevLabelTimestamp = 0;
  // Weak cache of loaded experiments, keyed by experiment id. Entries may hold cleared references.
  private Map<String, WeakReference<Experiment>> cachedExperiments = new HashMap<>();
  private ConnectableSensor.Connector connector;

  public DataControllerImpl(
      Context context,
      AppAccount appAccount,
      SensorDatabase sensorDatabase,
      Executor uiThread,
      Executor metaDataThread,
      Executor sensorDataThread,
      MetaDataManager metaDataManager,
      Clock clock,
      Map<String, SensorProvider> providerMap,
      ConnectableSensor.Connector connector) {
    this.context = context;
    this.appAccount = appAccount;
    this.sensorDatabase = sensorDatabase;
    this.uiThread = uiThread;
    this.metaDataThread = metaDataThread;
    this.sensorDataThread = sensorDataThread;
    this.metaDataManager = metaDataManager;
    this.clock = clock;
    this.providerMap = providerMap;
    this.connector = connector;
  }

  /**
   * Replaces {@code oldSensorId} with {@code newSensorId} in the experiment's sensor layouts and
   * its stored sensor list.
   */
  public void replaceSensorInExperiment(
      final String experimentId,
      final String oldSensorId,
      final String newSensorId,
      final MaybeConsumer<Success> onSuccess) {
    getExperimentById(
        experimentId,
        MaybeConsumers.chainFailure(
            onSuccess,
            new Consumer<Experiment>() {
              @Override
              public void take(final Experiment experiment) {
                replaceIdInLayouts(experiment, oldSensorId, newSensorId);
                background(
                    metaDataThread,
                    onSuccess,
                    () -> {
                      metaDataManager.eraseSensorFromExperiment(oldSensorId, experimentId);
                      metaDataManager.addSensorToExperiment(newSensorId, experimentId);
                      // No need to mark the experiment as dirty, as sensors do not sync.
                      // True would also be ok, but would just add an extra sync.
                      metaDataManager.updateExperiment(experiment, false);
                      return Success.SUCCESS;
                    });
              }
            }));
  }

  /** Rewrites any layout in the experiment that points at {@code oldSensorId}. */
  private void replaceIdInLayouts(Experiment experiment, String oldSensorId, String newSensorId) {
    for (SensorLayoutPojo layout : experiment.getSensorLayouts()) {
      if (layout.getSensorId().equals(oldSensorId)) {
        layout.setSensorId(newSensorId);
      }
    }
  }

  /** Deletes the trial's recorded scalar readings on the sensor-data thread. */
  private void removeTrialSensorData(final Trial trial) {
    sensorDataThread.execute(
        () -> {
          long firstTimestamp = trial.getOriginalFirstTimestamp();
          long lastTimestamp = trial.getOriginalLastTimestamp();
          if (firstTimestamp > lastTimestamp) {
            // TODO: Need a way to clean up invalid old data properly. For now, just
            // continue to ignore it because we cannot be sure where to stop deleting.
            return;
          }
          TimeRange times = TimeRange.oldest(Range.closed(firstTimestamp, lastTimestamp));
          for (String tag : trial.getSensorIds()) {
            sensorDatabase.deleteScalarReadings(trial.getTrialId(), tag, times);
          }
        });
  }

  @Override
  public void addScalarReadings(List<BatchInsertScalarReading> readings) {
    sensorDataThread.execute(
        () -> {
          try {
            sensorDatabase.addScalarReadings(readings);
          } catch (final Exception e) {
            // Failure listeners are looked up by sensor id; batch imports use a fixed key.
            uiThread.execute(() -> notifyFailureListener("batchImport", e));
          }
        });
  }

  @Override
  public void addScalarReading(
      final String trialId,
      final String sensorId,
      final int resolutionTier,
      final long timestampMillis,
      final double value) {
    sensorDataThread.execute(
        () -> {
          try {
            sensorDatabase.addScalarReading(
                trialId, sensorId, resolutionTier, timestampMillis, value);
          } catch (final Exception e) {
            uiThread.execute(() -> notifyFailureListener(sensorId, e));
          }
        });
  }

  /** Forwards {@code e} to the failure listener registered for {@code sensorId}, if any. */
  private void notifyFailureListener(String sensorId, Exception e) {
    FailureListener listener = sensorFailureListeners.get(sensorId);
    if (listener != null) {
      listener.fail(e);
    }
  }

  @Override
  public void getScalarReadings(
      final String trialId,
      final String databaseTag,
      final int resolutionTier,
      final TimeRange timeRange,
      final int maxRecords,
      final MaybeConsumer<ScalarReadingList> onSuccess) {
    Preconditions.checkNotNull(databaseTag);
    background(
        sensorDataThread,
        onSuccess,
        () ->
            sensorDatabase.getScalarReadings(
                trialId, databaseTag, timeRange, resolutionTier, maxRecords));
  }

  @Override
  public void getScalarReadingProtosInBackground(
      GoosciExperiment.Experiment experiment,
      final MaybeConsumer<GoosciScalarSensorData.ScalarSensorData> onSuccess) {
    Preconditions.checkNotNull(experiment);
    // NOTE(review): unlike background(), this invokes the callback directly on the
    // sensor-data thread (not the UI thread) and has no failure path — confirm callers expect
    // that before changing it.
    sensorDataThread.execute(
        () -> {
          onSuccess.success(sensorDatabase.getScalarReadingProtos(experiment));
        });
  }

  @Override
  public Observable<ScalarReading> createScalarObservable(
      final String trialId,
      final String[] sensorIds,
      final TimeRange timeRange,
      final int resolutionTier) {
    return sensorDatabase
        .createScalarObservable(trialId, sensorIds, timeRange, resolutionTier)
        .observeOn(Schedulers.from(sensorDataThread));
  }

  @Override
  public void deleteTrialData(final Trial trial, MaybeConsumer<Success> onSuccess) {
    // removeTrialSensorData re-posts the actual deletion onto sensorDataThread, so success is
    // reported once the work is *scheduled*, not necessarily completed.
    background(
        metaDataThread,
        onSuccess,
        () -> {
          removeTrialSensorData(trial);
          return Success.SUCCESS;
        });
  }

  @Override
  public void createExperiment(final MaybeConsumer<Experiment> onSuccess) {
    MaybeConsumer<Experiment> onSuccessWrapper =
        MaybeConsumers.chainFailure(
            onSuccess,
            new Consumer<Experiment>() {
              @Override
              public void take(Experiment experiment) {
                // Cache the new experiment before handing it to the caller so subsequent lookups
                // return the same instance.
                cacheExperiment(experiment);
                onSuccess.success(experiment);
              }
            });
    background(metaDataThread, onSuccessWrapper, () -> metaDataManager.newExperiment());
  }

  @Override
  public void deleteExperiment(final String experimentId, final MaybeConsumer<Success> onSuccess) {
    // Map.remove is a no-op for absent keys; no containsKey check needed.
    cachedExperiments.remove(experimentId);
    background(
        metaDataThread,
        onSuccess,
        () -> {
          deleteExperimentOnDataThread(experimentId);
          return Success.SUCCESS;
        });
  }

  @Override
  public void deleteExperiment(
      final Experiment experiment, final MaybeConsumer<Success> onSuccess) {
    cachedExperiments.remove(experiment.getExperimentId());
    background(
        metaDataThread,
        onSuccess,
        () -> {
          deleteExperimentOnDataThread(experiment);
          return Success.SUCCESS;
        });
  }

  private void deleteExperimentOnDataThread(Experiment experiment) {
    // TODO: delete invalid run data, as well (b/35794788)
    metaDataManager.deleteExperiment(experiment);
  }

  private void deleteExperimentOnDataThread(String experimentId) {
    // TODO: delete invalid run data, as well (b/35794788)
    metaDataManager.deleteExperiment(experimentId);
  }

  @Override
  public void getExperimentById(
      final String experimentId, final MaybeConsumer<Experiment> onSuccess) {
    if (cachedExperiments.containsKey(experimentId)) {
      Experiment experiment = cachedExperiments.get(experimentId).get();
      if (experiment != null) {
        // We are already caching this one
        onSuccess.success(experiment);
        return;
      }
    }
    MaybeConsumer<Experiment> onSuccessWrapper =
        MaybeConsumers.chainFailure(
            onSuccess,
            new Consumer<Experiment>() {
              @Override
              public void take(Experiment experiment) {
                cachedExperiments.put(experimentId, new WeakReference<>(experiment));
                onSuccess.success(experiment);
              }
            });
    background(
        metaDataThread,
        onSuccessWrapper,
        () -> {
          Experiment result = metaDataManager.getExperimentById(experimentId);
          if (result == null) {
            throw new IllegalArgumentException(
                "Could not find experiment with id " + experimentId);
          }
          return result;
        });
  }

  @Override
  public void experimentExists(
      final String experimentId, final MaybeConsumer<Boolean> onSuccess) {
    if (cachedExperiments.containsKey(experimentId)) {
      Experiment experiment = cachedExperiments.get(experimentId).get();
      if (experiment != null) {
        // We are already caching this one
        onSuccess.success(true);
        return;
      }
    }
    background(
        metaDataThread,
        onSuccess,
        () -> metaDataManager.getExperimentById(experimentId) != null);
  }

  @Override
  public void updateExperiment(
      final String experimentId,
      long lastUpdateTime,
      boolean setDirty,
      MaybeConsumer<Success> onSuccess) {
    if (!cachedExperiments.containsKey(experimentId)) {
      onSuccess.fail(new Exception("Experiment not loaded"));
      return;
    }
    final Experiment experiment = cachedExperiments.get(experimentId).get();
    if (experiment == null) {
      // Cached reference was garbage collected.
      onSuccess.fail(new Exception("Experiment not loaded"));
      return;
    }
    updateExperiment(experiment, lastUpdateTime, setDirty, onSuccess);
  }

  @Override
  public void updateExperiment(final String experimentId, MaybeConsumer<Success> onSuccess) {
    updateExperiment(experimentId, clock.getNow(), true, onSuccess);
  }

  @Override
  public void updateExperiment(
      final String experimentId, boolean setDirty, MaybeConsumer<Success> onSuccess) {
    updateExperiment(experimentId, clock.getNow(), setDirty, onSuccess);
  }

  @Override
  public void updateExperiment(Experiment experiment, MaybeConsumer<Success> onSuccess) {
    updateExperiment(experiment, clock.getNow(), false, onSuccess);
  }

  @Override
  public void updateExperiment(
      Experiment experiment,
      long lastUsedTime,
      boolean setDirty,
      MaybeConsumer<Success> onSuccess) {
    // Only the cached instance may be updated; anything else indicates a caller bug.
    if (!cachedExperiments.containsKey(experiment.getExperimentId())) {
      throw new IllegalArgumentException(
          "Updating experiment not returned by DataController: " + experiment);
    }
    if (cachedExperiments.get(experiment.getExperimentId()).get() != experiment) {
      throw new IllegalArgumentException(
          "Updating different instance of experiment than is managed by DataController: "
              + experiment);
    }
    // Every time we update the experiment, we can update its last used time.
    experiment.setLastUsedTime(lastUsedTime);
    background(
        metaDataThread,
        onSuccess,
        () -> {
          metaDataManager.updateExperiment(experiment, setDirty);
          return Success.SUCCESS;
        });
  }

  @Override
  public void updateExperimentEvenIfNotActive(
      Experiment experiment,
      long lastUsedTime,
      boolean setDirty,
      MaybeConsumer<Success> onSuccess) {
    if (!cachedExperiments.containsKey(experiment.getExperimentId())) {
      Log.e(TAG, "Updating Non Active: " + experiment);
    }
    // Every time we update the experiment, we can update its last used time.
    experiment.setLastUsedTime(lastUsedTime);
    background(
        metaDataThread,
        onSuccess,
        () -> {
          metaDataManager.updateExperiment(experiment, setDirty);
          return Success.SUCCESS;
        });
  }

  @Override
  public void saveImmediately(MaybeConsumer<Success> onSuccess) {
    background(
        metaDataThread,
        onSuccess,
        () -> {
          metaDataManager.saveImmediately();
          return Success.SUCCESS;
        });
  }

  @Override
  public void addExperiment(Experiment experiment, MaybeConsumer<Success> onSuccess) {
    if (cachedExperiments.containsKey(experiment.getExperimentId())) {
      throw new IllegalArgumentException(
          "Adding experiment already returned by DataController: " + experiment);
    }
    background(
        metaDataThread,
        onSuccess,
        () -> {
          metaDataManager.addExperiment(experiment);
          cacheExperiment(experiment);
          return Success.SUCCESS;
        });
  }

  /**
   * Merges {@code toMerge} into the stored experiment with {@code experimentId}, persists the
   * result, and replaces the cached instance.
   */
  @Override
  public void mergeExperiment(
      String experimentId,
      Experiment toMerge,
      boolean overwrite,
      MaybeConsumer<FileSyncCollection> onSuccess) {
    background(
        metaDataThread,
        onSuccess,
        () -> {
          Experiment result = metaDataManager.getExperimentById(experimentId);
          if (result == null) {
            throw new IllegalArgumentException(
                "Could not find experiment with id " + experimentId);
          }
          FileSyncCollection sync = result.mergeFrom(toMerge, context, appAccount, overwrite);
          if (Strings.isNullOrEmpty(result.getTitle())) {
            result.setTitle(toMerge.getTitle());
          }
          metaDataManager.updateExperiment(result, false);
          metaDataManager.saveImmediately();
          cachedExperiments.put(experimentId, new WeakReference<>(result));
          return sync;
        });
  }

  /** Returns a label id based on the current clock time, guaranteed never to repeat. */
  @Override
  public String generateNewLabelId() {
    long nextLabelTimestamp = clock.getNow();
    if (nextLabelTimestamp <= prevLabelTimestamp) {
      // Make sure we never use the same label ID twice.
      nextLabelTimestamp = prevLabelTimestamp + 1;
    }
    prevLabelTimestamp = nextLabelTimestamp;
    return "label_" + nextLabelTimestamp;
  }

  @Override
  public void getExperimentOverviews(
      final boolean includeArchived, final MaybeConsumer<List<ExperimentOverviewPojo>> onSuccess) {
    background(
        metaDataThread,
        onSuccess,
        () -> metaDataManager.getExperimentOverviews(includeArchived));
  }

  /** Synchronous variant of {@link #getExperimentOverviews}; call off the UI thread. */
  @Override
  public List<ExperimentOverviewPojo> blockingGetExperimentOverviews(boolean includeArchived) {
    return metaDataManager.getExperimentOverviews(includeArchived);
  }

  @Override
  public void getLastUsedUnarchivedExperiment(final MaybeConsumer<Experiment> onSuccess) {
    MaybeConsumer<Experiment> onSuccessWrapper =
        new MaybeConsumer<Experiment>() {
          @Override
          public void success(Experiment lastUsed) {
            if (lastUsed == null) {
              onSuccess.success(null);
              return;
            }
            if (cachedExperiments.containsKey(lastUsed.getExperimentId())) {
              // Use the same object if it's already in the cache.
              Experiment cached = cachedExperiments.get(lastUsed.getExperimentId()).get();
              if (cached != null) {
                onSuccess.success(cached);
                return;
              }
            }
            cacheExperiment(lastUsed);
            onSuccess.success(lastUsed);
          }

          @Override
          public void fail(Exception e) {
            onSuccess.fail(e);
          }
        };
    background(
        metaDataThread, onSuccessWrapper, () -> metaDataManager.getLastUsedUnarchivedExperiment());
  }

  @Override
  public void importExperimentFromZip(
      final Uri zipUri, ContentResolver resolver, final MaybeConsumer<String> onSuccess) {
    background(
        metaDataThread,
        onSuccess,
        () -> {
          Experiment experiment = metaDataManager.importExperimentFromZip(zipUri, resolver);
          cacheExperiment(experiment);
          return experiment.getExperimentId();
        });
  }

  /** Puts {@code experiment} in the weak cache keyed by its id. */
  private void cacheExperiment(Experiment experiment) {
    cachedExperiments.put(experiment.getExperimentId(), new WeakReference<>(experiment));
  }

  @Override
  public void getExternalSensors(final MaybeConsumer<Map<String, ExternalSensorSpec>> onSuccess) {
    background(
        metaDataThread, onSuccess, () -> metaDataManager.getExternalSensors(providerMap));
  }

  @Override
  public void getExternalSensorsByExperiment(
      final String experimentId, final MaybeConsumer<ExperimentSensors> onSuccess) {
    background(
        metaDataThread,
        onSuccess,
        () -> metaDataManager.getExperimentSensors(experimentId, providerMap, connector));
  }

  @Override
  public void getExternalSensorById(
      final String id, final MaybeConsumer<ExternalSensorSpec> onSuccess) {
    background(
        metaDataThread, onSuccess, () -> metaDataManager.getExternalSensorById(id, providerMap));
  }

  @Override
  public void addSensorToExperiment(
      final String experimentId, final String sensorId, final MaybeConsumer<Success> onSuccess) {
    background(
        metaDataThread,
        onSuccess,
        () -> {
          metaDataManager.addSensorToExperiment(sensorId, experimentId);
          return Success.SUCCESS;
        });
  }

  @Override
  public void removeSensorFromExperiment(
      final String experimentId, final String sensorId, final MaybeConsumer<Success> onSuccess) {
    getExperimentById(
        experimentId,
        MaybeConsumers.chainFailure(
            onSuccess,
            new Consumer<Experiment>() {
              @Override
              public void take(final Experiment experiment) {
                // Clear the sensor id out of any layouts that referenced it.
                replaceIdInLayouts(experiment, sensorId, "");
                background(
                    metaDataThread,
                    onSuccess,
                    () -> {
                      metaDataManager.removeSensorFromExperiment(sensorId, experimentId);
                      metaDataManager.updateExperiment(experiment, false);
                      return Success.SUCCESS;
                    });
              }
            }));
  }

  @Override
  public void eraseSensorFromExperiment(
      final String experimentId, final String sensorId, final MaybeConsumer<Success> onSuccess) {
    getExperimentById(
        experimentId,
        MaybeConsumers.chainFailure(
            onSuccess,
            new Consumer<Experiment>() {
              @Override
              public void take(final Experiment experiment) {
                replaceIdInLayouts(experiment, sensorId, "");
                background(
                    metaDataThread,
                    onSuccess,
                    () -> {
                      metaDataManager.eraseSensorFromExperiment(sensorId, experimentId);
                      metaDataManager.updateExperiment(experiment, false);
                      return Success.SUCCESS;
                    });
              }
            }));
  }

  @Override
  public void setDataErrorListenerForSensor(String sensorId, FailureListener listener) {
    sensorFailureListeners.put(sensorId, listener);
  }

  @Override
  public void clearDataErrorListenerForSensor(String sensorId) {
    sensorFailureListeners.remove(sensorId);
  }

  @Override
  public void addOrGetExternalSensor(
      final ExternalSensorSpec sensor, final MaybeConsumer<String> onSensorId) {
    background(
        metaDataThread, onSensorId, () -> metaDataManager.addOrGetExternalSensor(sensor, providerMap));
  }

  /**
   * Runs {@code job} on {@code dataThread} and delivers its result (or exception) to
   * {@code onSuccess} on the UI thread.
   */
  private <T> void background(
      Executor dataThread, final MaybeConsumer<T> onSuccess, final Callable<T> job) {
    // Capture the caller's stack now, so a failure logged from the background thread can be
    // traced back to the originating call site.
    RuntimeException runtimeExceptionWithOriginalStackTrace =
        new RuntimeException(
            "This is the stack trace for thread "
                + Thread.currentThread()
                + ", which called DataControllerImpl.background().");
    dataThread.execute(
        () -> {
          try {
            final T result = job.call();
            uiThread.execute(() -> onSuccess.success(result));
          } catch (final Exception e) {
            Log.e(
                TAG,
                "Caught exception (" + e + ") while executing background job.",
                runtimeExceptionWithOriginalStackTrace);
            uiThread.execute(() -> onSuccess.fail(e));
          }
        });
  }

  @Override
  public void getMyDevices(MaybeConsumer<List<InputDeviceSpec>> onSuccess) {
    background(metaDataThread, onSuccess, () -> metaDataManager.getMyDevices());
  }

  @Override
  public void addMyDevice(final InputDeviceSpec spec, MaybeConsumer<Success> onSuccess) {
    background(
        metaDataThread,
        onSuccess,
        () -> {
          metaDataManager.addMyDevice(spec);
          return Success.SUCCESS;
        });
  }

  @Override
  public void forgetMyDevice(final InputDeviceSpec spec, MaybeConsumer<Success> onSuccess) {
    background(
        metaDataThread,
        onSuccess,
        () -> {
          metaDataManager.removeMyDevice(spec);
          return Success.SUCCESS;
        });
  }

  @Override
  public AppAccount getAppAccount() {
    return appAccount;
  }

  @Override
  public void moveAllExperimentsToAnotherAccount(
      AppAccount targetAccount, final MaybeConsumer<Success> onSuccess) {
    background(
        metaDataThread,
        onSuccess,
        () -> {
          moveAllExperimentsToAnotherAccountOnDataThread(targetAccount);
          cachedExperiments.clear();
          return Success.SUCCESS;
        });
  }

  private void moveAllExperimentsToAnotherAccountOnDataThread(AppAccount targetAccount)
      throws IOException {
    metaDataManager.saveImmediately();
    // Move each experiment, one at a time.
    List<ExperimentOverviewPojo> experiments =
        blockingGetExperimentOverviews(true /* includeArchived */);
    for (ExperimentOverviewPojo overview : experiments) {
      Experiment experiment = getExperimentFromId(overview.getExperimentId());
      moveExperimentToAnotherAccountOnDataThread(experiment, targetAccount);
    }
  }

  @Override
  public void deleteAllExperiments(final MaybeConsumer<Success> onSuccess) {
    background(
        metaDataThread,
        onSuccess,
        () -> {
          deleteAllExperimentsOnDataThread();
          cachedExperiments.clear();
          return Success.SUCCESS;
        });
  }

  private void deleteAllExperimentsOnDataThread() {
    List<ExperimentOverviewPojo> experiments =
        blockingGetExperimentOverviews(true /* includeArchived */);
    for (ExperimentOverviewPojo overview : experiments) {
      Experiment experiment = getExperimentFromId(overview.getExperimentId());
      deleteExperimentOnDataThread(experiment);
    }
  }

  /** Returns the cached experiment for {@code experimentId}, falling back to storage. */
  private Experiment getExperimentFromId(String experimentId) {
    Experiment experiment = null;
    if (cachedExperiments.containsKey(experimentId)) {
      experiment = cachedExperiments.get(experimentId).get();
    }
    if (experiment == null) {
      // Even if the experiment id is in the cache, the experiment might still be null
      // if it has been garbage collected.
      experiment = metaDataManager.getExperimentById(experimentId);
    }
    return experiment;
  }

  @Override
  public void moveExperimentToAnotherAccount(
      String experimentId, AppAccount targetAccount, MaybeConsumer<Success> onSuccess) {
    // Drop our cached copy; after the move this account no longer owns the experiment.
    cachedExperiments.remove(experimentId);
    getExperimentById(
        experimentId,
        MaybeConsumers.chainFailure(
            onSuccess,
            new Consumer<Experiment>() {
              @Override
              public void take(final Experiment experiment) {
                background(
                    metaDataThread,
                    onSuccess,
                    () -> {
                      moveExperimentToAnotherAccountOnDataThread(experiment, targetAccount);
                      return Success.SUCCESS;
                    });
              }
            }));
  }

  /**
   * Moves one experiment's files and scalar sensor data into {@code targetAccount}'s data
   * controller, re-registering it with the original account on failure.
   */
  private void moveExperimentToAnotherAccountOnDataThread(
      Experiment experiment, AppAccount targetAccount) throws IOException {
    DataControllerImpl targetDataController =
        (DataControllerImpl) AppSingleton.getInstance(context).getDataController(targetAccount);
    metaDataManager.saveImmediately();
    metaDataManager.beforeMovingExperimentToAnotherAccount(experiment);
    //TODO(b/129534983): Write test that covers an Exception when claiming experiments
    try {
      // Move files.
      metaDataManager.moveExperimentToAnotherAccount(experiment, targetAccount);
      // Move scalar sensor data.
      List<ScalarSensorDataDump> scalarSensorData =
          sensorDatabase.getScalarReadingProtosAsList(experiment.getExperimentProto());
      ScalarSensorDumpReader scalarSensorDumpReader =
          new ScalarSensorDumpReader(targetDataController);
      scalarSensorDumpReader.readData(scalarSensorData);
      // Delete the readings from this account's database now that they've been copied over.
      for (Trial trial : experiment.getTrials()) {
        removeTrialSensorData(trial);
      }
      targetDataController.metaDataManager.afterMovingExperimentFromAnotherAccount(experiment);
    } catch (Exception e) {
      // Re-add it to the original MetaDataManager
      metaDataManager.afterMovingExperimentFromAnotherAccount(experiment);
      throw e;
    }
  }

  /**
   * Writes the trial's scalar sensor data as a proto file inside the experiment directory and
   * delivers the resulting {@link File} (or null on write failure) to {@code onSuccess}.
   */
  @Override
  public void writeTrialProtoToFile(
      String experimentId, String trialId, final MaybeConsumer<File> onSuccess) throws IOException {
    getExperimentById(
        experimentId,
        MaybeConsumers.chainFailure(
            onSuccess,
            new Consumer<Experiment>() {
              @Override
              public void take(final Experiment experiment) {
                background(
                    metaDataThread,
                    onSuccess,
                    () -> {
                      GoosciScalarSensorData.ScalarSensorData proto =
                          sensorDatabase.getScalarReadingProtosForTrial(
                              experiment.getExperimentProto(), trialId);
                      File sensorProtoFile =
                          new File(
                              FileMetadataUtil.getInstance()
                                  .getExperimentDirectory(
                                      appAccount, experiment.getExperimentId()),
                              FileMetadataUtil.getInstance().getTrialProtoFileName(trialId));
                      try (FileOutputStream sensorStream =
                          new FileOutputStream(sensorProtoFile)) {
                        proto.writeTo(sensorStream);
                        return sensorProtoFile;
                      } catch (IOException ioException) {
                        // Best-effort: a failed write surfaces to the caller as a null File.
                        return null;
                      }
                    });
              }
            }));
  }
}
|
googleapis/google-cloud-java | 35,062 | java-document-ai/proto-google-cloud-document-ai-v1beta3/src/main/java/com/google/cloud/documentai/v1beta3/BatchDeleteDocumentsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/documentai/v1beta3/document_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.documentai.v1beta3;
/** Protobuf type {@code google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest} */
public final class BatchDeleteDocumentsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest)
BatchDeleteDocumentsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use BatchDeleteDocumentsRequest.newBuilder() to construct.
// Use BatchDeleteDocumentsRequest.newBuilder() to construct.
private BatchDeleteDocumentsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// Initializes string fields to their proto3 default (empty string).
private BatchDeleteDocumentsRequest() {
  dataset_ = "";
}

// Factory used by the protobuf runtime to create fresh instances of this message.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new BatchDeleteDocumentsRequest();
}
/** Returns the proto descriptor for this message type. */
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.documentai.v1beta3.DocumentAiDocumentService
      .internal_static_google_cloud_documentai_v1beta3_BatchDeleteDocumentsRequest_descriptor;
}

// Wires the generated field accessor table to this message class and its Builder.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.documentai.v1beta3.DocumentAiDocumentService
      .internal_static_google_cloud_documentai_v1beta3_BatchDeleteDocumentsRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest.class,
          com.google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest.Builder.class);
}
// Presence bits for optional message fields (bit 0 tracks dataset_documents).
private int bitField0_;
public static final int DATASET_FIELD_NUMBER = 1;

// Holds either a String or a ByteString; decoded lazily on first string access.
@SuppressWarnings("serial")
private volatile java.lang.Object dataset_ = "";

/**
 *
 *
 * <pre>
 * Required. The dataset resource name.
 * Format:
 * projects/{project}/locations/{location}/processors/{processor}/dataset
 * </pre>
 *
 * <code>string dataset = 1 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The dataset.
 */
@java.lang.Override
public java.lang.String getDataset() {
  java.lang.Object ref = dataset_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded String so subsequent reads skip the UTF-8 decode.
    dataset_ = s;
    return s;
  }
}
/**
 *
 *
 * <pre>
 * Required. The dataset resource name.
 * Format:
 * projects/{project}/locations/{location}/processors/{processor}/dataset
 * </pre>
 *
 * <code>string dataset = 1 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The bytes for dataset.
 */
@java.lang.Override
public com.google.protobuf.ByteString getDatasetBytes() {
  java.lang.Object ref = dataset_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    // Cache the encoded ByteString so subsequent byte reads skip the UTF-8 encode.
    dataset_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int DATASET_DOCUMENTS_FIELD_NUMBER = 3;

// Null until set; presence is tracked by bit 0 of bitField0_.
private com.google.cloud.documentai.v1beta3.BatchDatasetDocuments datasetDocuments_;

/**
 *
 *
 * <pre>
 * Required. Dataset documents input. If given `filter`, all documents
 * satisfying the filter will be deleted. If given documentIds, a maximum of
 * 50 documents can be deleted in a batch. The request will be rejected if
 * more than 50 document_ids are provided.
 * </pre>
 *
 * <code>
 * .google.cloud.documentai.v1beta3.BatchDatasetDocuments dataset_documents = 3 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return Whether the datasetDocuments field is set.
 */
@java.lang.Override
public boolean hasDatasetDocuments() {
  return ((bitField0_ & 0x00000001) != 0);
}
/**
 *
 *
 * <pre>
 * Required. Dataset documents input. If given `filter`, all documents
 * satisfying the filter will be deleted. If given documentIds, a maximum of
 * 50 documents can be deleted in a batch. The request will be rejected if
 * more than 50 document_ids are provided.
 * </pre>
 *
 * <code>
 * .google.cloud.documentai.v1beta3.BatchDatasetDocuments dataset_documents = 3 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return The datasetDocuments.
 */
@java.lang.Override
public com.google.cloud.documentai.v1beta3.BatchDatasetDocuments getDatasetDocuments() {
  // Never returns null: falls back to the default instance when the field is unset.
  return datasetDocuments_ == null
      ? com.google.cloud.documentai.v1beta3.BatchDatasetDocuments.getDefaultInstance()
      : datasetDocuments_;
}
/**
 *
 *
 * <pre>
 * Required. Dataset documents input. If given `filter`, all documents
 * satisfying the filter will be deleted. If given documentIds, a maximum of
 * 50 documents can be deleted in a batch. The request will be rejected if
 * more than 50 document_ids are provided.
 * </pre>
 *
 * <code>
 * .google.cloud.documentai.v1beta3.BatchDatasetDocuments dataset_documents = 3 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
@java.lang.Override
public com.google.cloud.documentai.v1beta3.BatchDatasetDocumentsOrBuilder
    getDatasetDocumentsOrBuilder() {
  // Never returns null: falls back to the default instance when the field is unset.
  return datasetDocuments_ == null
      ? com.google.cloud.documentai.v1beta3.BatchDatasetDocuments.getDefaultInstance()
      : datasetDocuments_;
}
  // Memoized initialization check: -1 = not computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required proto2-style fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Field 1 (dataset): proto3 string, only serialized when non-empty.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(dataset_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, dataset_);
    }
    // Field 3 (dataset_documents): only serialized when explicitly set (presence bit 0).
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(3, getDatasetDocuments());
    }
    // Preserve any fields this binary does not know about.
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Size is memoized; -1 means "not yet computed".
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    // Mirrors writeTo(): count exactly the fields that would be emitted.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(dataset_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, dataset_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getDatasetDocuments());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest other =
        (com.google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest) obj;
    // dataset: proto3 string, never null, so a plain equals() is safe.
    if (!getDataset().equals(other.getDataset())) return false;
    // dataset_documents: presence must match before values are compared.
    if (hasDatasetDocuments() != other.hasDatasetDocuments()) return false;
    if (hasDatasetDocuments()) {
      if (!getDatasetDocuments().equals(other.getDatasetDocuments())) return false;
    }
    // Unknown fields participate in equality per protobuf semantics.
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Memoized; 0 means "not yet computed" (a computed hash of 0 would be recomputed,
    // which is harmless since the message is immutable).
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // Mix in field number then value for each present field, matching equals().
    hash = (37 * hash) + DATASET_FIELD_NUMBER;
    hash = (53 * hash) + getDataset().hashCode();
    if (hasDatasetDocuments()) {
      hash = (37 * hash) + DATASET_DOCUMENTS_FIELD_NUMBER;
      hash = (53 * hash) + getDatasetDocuments().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // -------------------------------------------------------------------------
  // Standard generated parse entry points. All overloads delegate to PARSER
  // (or the GeneratedMessageV3 I/O helpers) and differ only in input source:
  // ByteBuffer / ByteString / byte[] / InputStream / CodedInputStream, each
  // with an optional ExtensionRegistryLite variant.
  // -------------------------------------------------------------------------
  public static com.google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message payload.
  public static com.google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  // Fresh builder seeded from the shared default instance.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Builder pre-populated with the given prototype's field values.
  public static Builder newBuilder(
      com.google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Avoid a redundant mergeFrom when this IS the default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/** Protobuf type {@code google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest} */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest)
com.google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.documentai.v1beta3.DocumentAiDocumentService
.internal_static_google_cloud_documentai_v1beta3_BatchDeleteDocumentsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.documentai.v1beta3.DocumentAiDocumentService
.internal_static_google_cloud_documentai_v1beta3_BatchDeleteDocumentsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest.class,
com.google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest.Builder.class);
}
// Construct using com.google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getDatasetDocumentsFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
dataset_ = "";
datasetDocuments_ = null;
if (datasetDocumentsBuilder_ != null) {
datasetDocumentsBuilder_.dispose();
datasetDocumentsBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.documentai.v1beta3.DocumentAiDocumentService
.internal_static_google_cloud_documentai_v1beta3_BatchDeleteDocumentsRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest
getDefaultInstanceForType() {
return com.google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest build() {
com.google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest buildPartial() {
com.google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest result =
new com.google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.dataset_ = dataset_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.datasetDocuments_ =
datasetDocumentsBuilder_ == null ? datasetDocuments_ : datasetDocumentsBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest) {
return mergeFrom((com.google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest other) {
if (other
== com.google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest.getDefaultInstance())
return this;
if (!other.getDataset().isEmpty()) {
dataset_ = other.dataset_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.hasDatasetDocuments()) {
mergeDatasetDocuments(other.getDatasetDocuments());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
dataset_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 26:
{
input.readMessage(
getDatasetDocumentsFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object dataset_ = "";
/**
*
*
* <pre>
* Required. The dataset resource name.
* Format:
* projects/{project}/locations/{location}/processors/{processor}/dataset
* </pre>
*
* <code>string dataset = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The dataset.
*/
public java.lang.String getDataset() {
java.lang.Object ref = dataset_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
dataset_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The dataset resource name.
* Format:
* projects/{project}/locations/{location}/processors/{processor}/dataset
* </pre>
*
* <code>string dataset = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for dataset.
*/
public com.google.protobuf.ByteString getDatasetBytes() {
java.lang.Object ref = dataset_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
dataset_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The dataset resource name.
* Format:
* projects/{project}/locations/{location}/processors/{processor}/dataset
* </pre>
*
* <code>string dataset = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The dataset to set.
* @return This builder for chaining.
*/
public Builder setDataset(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
dataset_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The dataset resource name.
* Format:
* projects/{project}/locations/{location}/processors/{processor}/dataset
* </pre>
*
* <code>string dataset = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearDataset() {
dataset_ = getDefaultInstance().getDataset();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The dataset resource name.
* Format:
* projects/{project}/locations/{location}/processors/{processor}/dataset
* </pre>
*
* <code>string dataset = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for dataset to set.
* @return This builder for chaining.
*/
public Builder setDatasetBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
dataset_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.cloud.documentai.v1beta3.BatchDatasetDocuments datasetDocuments_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.documentai.v1beta3.BatchDatasetDocuments,
com.google.cloud.documentai.v1beta3.BatchDatasetDocuments.Builder,
com.google.cloud.documentai.v1beta3.BatchDatasetDocumentsOrBuilder>
datasetDocumentsBuilder_;
/**
*
*
* <pre>
* Required. Dataset documents input. If given `filter`, all documents
* satisfying the filter will be deleted. If given documentIds, a maximum of
* 50 documents can be deleted in a batch. The request will be rejected if
* more than 50 document_ids are provided.
* </pre>
*
* <code>
* .google.cloud.documentai.v1beta3.BatchDatasetDocuments dataset_documents = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the datasetDocuments field is set.
*/
public boolean hasDatasetDocuments() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. Dataset documents input. If given `filter`, all documents
* satisfying the filter will be deleted. If given documentIds, a maximum of
* 50 documents can be deleted in a batch. The request will be rejected if
* more than 50 document_ids are provided.
* </pre>
*
* <code>
* .google.cloud.documentai.v1beta3.BatchDatasetDocuments dataset_documents = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The datasetDocuments.
*/
public com.google.cloud.documentai.v1beta3.BatchDatasetDocuments getDatasetDocuments() {
if (datasetDocumentsBuilder_ == null) {
return datasetDocuments_ == null
? com.google.cloud.documentai.v1beta3.BatchDatasetDocuments.getDefaultInstance()
: datasetDocuments_;
} else {
return datasetDocumentsBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. Dataset documents input. If given `filter`, all documents
* satisfying the filter will be deleted. If given documentIds, a maximum of
* 50 documents can be deleted in a batch. The request will be rejected if
* more than 50 document_ids are provided.
* </pre>
*
* <code>
* .google.cloud.documentai.v1beta3.BatchDatasetDocuments dataset_documents = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setDatasetDocuments(
com.google.cloud.documentai.v1beta3.BatchDatasetDocuments value) {
if (datasetDocumentsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
datasetDocuments_ = value;
} else {
datasetDocumentsBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Dataset documents input. If given `filter`, all documents
* satisfying the filter will be deleted. If given documentIds, a maximum of
* 50 documents can be deleted in a batch. The request will be rejected if
* more than 50 document_ids are provided.
* </pre>
*
* <code>
* .google.cloud.documentai.v1beta3.BatchDatasetDocuments dataset_documents = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setDatasetDocuments(
com.google.cloud.documentai.v1beta3.BatchDatasetDocuments.Builder builderForValue) {
if (datasetDocumentsBuilder_ == null) {
datasetDocuments_ = builderForValue.build();
} else {
datasetDocumentsBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Dataset documents input. If given `filter`, all documents
* satisfying the filter will be deleted. If given documentIds, a maximum of
* 50 documents can be deleted in a batch. The request will be rejected if
* more than 50 document_ids are provided.
* </pre>
*
* <code>
* .google.cloud.documentai.v1beta3.BatchDatasetDocuments dataset_documents = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeDatasetDocuments(
com.google.cloud.documentai.v1beta3.BatchDatasetDocuments value) {
if (datasetDocumentsBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& datasetDocuments_ != null
&& datasetDocuments_
!= com.google.cloud.documentai.v1beta3.BatchDatasetDocuments.getDefaultInstance()) {
getDatasetDocumentsBuilder().mergeFrom(value);
} else {
datasetDocuments_ = value;
}
} else {
datasetDocumentsBuilder_.mergeFrom(value);
}
if (datasetDocuments_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. Dataset documents input. If given `filter`, all documents
* satisfying the filter will be deleted. If given documentIds, a maximum of
* 50 documents can be deleted in a batch. The request will be rejected if
* more than 50 document_ids are provided.
* </pre>
*
* <code>
* .google.cloud.documentai.v1beta3.BatchDatasetDocuments dataset_documents = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearDatasetDocuments() {
bitField0_ = (bitField0_ & ~0x00000002);
datasetDocuments_ = null;
if (datasetDocumentsBuilder_ != null) {
datasetDocumentsBuilder_.dispose();
datasetDocumentsBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Dataset documents input. If given `filter`, all documents
* satisfying the filter will be deleted. If given documentIds, a maximum of
* 50 documents can be deleted in a batch. The request will be rejected if
* more than 50 document_ids are provided.
* </pre>
*
* <code>
* .google.cloud.documentai.v1beta3.BatchDatasetDocuments dataset_documents = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.documentai.v1beta3.BatchDatasetDocuments.Builder
getDatasetDocumentsBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getDatasetDocumentsFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. Dataset documents input. If given `filter`, all documents
* satisfying the filter will be deleted. If given documentIds, a maximum of
* 50 documents can be deleted in a batch. The request will be rejected if
* more than 50 document_ids are provided.
* </pre>
*
* <code>
* .google.cloud.documentai.v1beta3.BatchDatasetDocuments dataset_documents = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.documentai.v1beta3.BatchDatasetDocumentsOrBuilder
getDatasetDocumentsOrBuilder() {
if (datasetDocumentsBuilder_ != null) {
return datasetDocumentsBuilder_.getMessageOrBuilder();
} else {
return datasetDocuments_ == null
? com.google.cloud.documentai.v1beta3.BatchDatasetDocuments.getDefaultInstance()
: datasetDocuments_;
}
}
/**
*
*
* <pre>
* Required. Dataset documents input. If given `filter`, all documents
* satisfying the filter will be deleted. If given documentIds, a maximum of
* 50 documents can be deleted in a batch. The request will be rejected if
* more than 50 document_ids are provided.
* </pre>
*
* <code>
* .google.cloud.documentai.v1beta3.BatchDatasetDocuments dataset_documents = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.documentai.v1beta3.BatchDatasetDocuments,
com.google.cloud.documentai.v1beta3.BatchDatasetDocuments.Builder,
com.google.cloud.documentai.v1beta3.BatchDatasetDocumentsOrBuilder>
getDatasetDocumentsFieldBuilder() {
if (datasetDocumentsBuilder_ == null) {
datasetDocumentsBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.documentai.v1beta3.BatchDatasetDocuments,
com.google.cloud.documentai.v1beta3.BatchDatasetDocuments.Builder,
com.google.cloud.documentai.v1beta3.BatchDatasetDocumentsOrBuilder>(
getDatasetDocuments(), getParentForChildren(), isClean());
datasetDocuments_ = null;
}
return datasetDocumentsBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest)
  // Singleton default instance: all fields at proto3 defaults; shared and immutable.
  private static final com.google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest();
  }
  public static com.google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser used by all parseFrom() entry points; builds via the Builder so that
  // partially-read data is preserved on failure (setUnfinishedMessage).
  private static final com.google.protobuf.Parser<BatchDeleteDocumentsRequest> PARSER =
      new com.google.protobuf.AbstractParser<BatchDeleteDocumentsRequest>() {
        @java.lang.Override
        public BatchDeleteDocumentsRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures so callers see a single exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<BatchDeleteDocumentsRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<BatchDeleteDocumentsRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.documentai.v1beta3.BatchDeleteDocumentsRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/hadoop | 34,810 | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/distributed/TestNodeQueueLoadMonitor.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.scheduler.distributed;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import org.apache.hadoop.net.NodeBase;
import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableMap;
import org.apache.hadoop.yarn.api.protocolrecords.ResourceTypes;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.NodeState;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceInformation;
import org.apache.hadoop.yarn.api.records.ResourceUtilization;
import org.apache.hadoop.yarn.server.api.records.ContainerQueuingLimit;
import org.apache.hadoop.yarn.server.api.records.OpportunisticContainersStatus;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
import org.apache.hadoop.yarn.util.resource.ResourceUtils;
import org.apache.hadoop.yarn.util.resource.Resources;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* Unit tests for NodeQueueLoadMonitor.
*/
public class TestNodeQueueLoadMonitor {
// Extra resource type to test that all resource dimensions are considered
private static final String NETWORK_RESOURCE = "network";
private final static int DEFAULT_MAX_QUEUE_LENGTH = 200;
// Note: The following variables are private static resources
// re-initialized on each test because resource dimensions considered
// are initialized in a static method.
// Declaring them as static final will "lock-in" resource dimensions and
// disallow specification of a new resource dimension ("network") in tests.
private static Resource defaultResourceRequested;
private static Resource defaultCapacity;
  /**
   * Minimal {@link NodeId} stub for tests. It reports exactly the host/port
   * supplied at construction; the protected mutators and build() are no-ops
   * because the state is fixed for the lifetime of the test.
   */
  static class FakeNodeId extends NodeId {
    final String host;
    final int port;
    public FakeNodeId(String host, int port) {
      this.host = host;
      this.port = port;
    }
    @Override
    public String getHost() {
      return host;
    }
    @Override
    public int getPort() {
      return port;
    }
    // Intentionally empty: FakeNodeId is immutable after construction.
    @Override
    protected void setHost(String host) {}
    @Override
    protected void setPort(int port) {}
    @Override
    protected void build() {}
    @Override
    public String toString() {
      // Matches the "host:port" format the sort-order assertions compare against.
      return host + ":" + port;
    }
  }
private static Resource newResourceInstance(long memory, int vCores) {
return newResourceInstance(memory, vCores, 0L);
}
  /**
   * Builds a test resource carrying memory, vcores and the custom "network"
   * dimension (registered in {@link #classSetUp()}).
   */
  private static Resource newResourceInstance(
      final long memory, final int vCores, final long network) {
    return Resource.newInstance(memory, vCores,
        ImmutableMap.of(NETWORK_RESOURCE, network));
  }
  /** Reads the custom "network" dimension off a resource. */
  private static long getNetworkResourceValue(final Resource resource) {
    return resource.getResourceValue(NETWORK_RESOURCE);
  }
  /**
   * Re-initializes YARN's global resource-type registry with the mandatory
   * memory/vcores types plus the given extra countable types. Must run before
   * any Resource is created, since the registry is consulted statically.
   */
  public static void addNewTypesToResources(String... resourceTypes) {
    // Initialize resource map
    Map<String, ResourceInformation> riMap = new HashMap<>();
    // Initialize mandatory resources
    riMap.put(ResourceInformation.MEMORY_URI, ResourceInformation.MEMORY_MB);
    riMap.put(ResourceInformation.VCORES_URI, ResourceInformation.VCORES);
    for (String newResource : resourceTypes) {
      riMap.put(newResource, ResourceInformation
          .newInstance(newResource, "", 0, ResourceTypes.COUNTABLE, 0,
              Integer.MAX_VALUE));
    }
    ResourceUtils.initializeResourcesFromResourceInformationMap(riMap);
  }
  @BeforeAll
  public static void classSetUp() {
    // Register the extra "network" dimension BEFORE building any Resource,
    // then create the shared request/capacity fixtures used across tests.
    addNewTypesToResources(NETWORK_RESOURCE);
    defaultResourceRequested = newResourceInstance(128, 1, 1);
    defaultCapacity = newResourceInstance(1024, 8, 1000);
  }
  /**
   * Verifies QUEUE_WAIT_TIME ordering: nodes sort ascending by reported queue
   * wait time, nodes reporting -1 are ignored, and DECOMMISSIONING nodes are
   * excluded until they return to RUNNING.
   */
  @Test
  public void testWaitTimeSort() {
    NodeQueueLoadMonitor selector = new NodeQueueLoadMonitor(
        NodeQueueLoadMonitor.LoadComparator.QUEUE_WAIT_TIME);
    selector.updateNode(createRMNode("h1", 1, 15, 10));
    selector.updateNode(createRMNode("h2", 2, 5, 10));
    selector.updateNode(createRMNode("h3", 3, 10, 10));
    // Sorted list is only refreshed by the periodic compute task.
    selector.computeTask.run();
    List<NodeId> nodeIds = selector.selectNodes();
    assertEquals("h2:2", nodeIds.get(0).toString());
    assertEquals("h3:3", nodeIds.get(1).toString());
    assertEquals("h1:1", nodeIds.get(2).toString());
    // Now update node3
    selector.updateNode(createRMNode("h3", 3, 2, 10));
    selector.computeTask.run();
    nodeIds = selector.selectNodes();
    assertEquals("h3:3", nodeIds.get(0).toString());
    assertEquals("h2:2", nodeIds.get(1).toString());
    assertEquals("h1:1", nodeIds.get(2).toString());
    // Now send update with -1 wait time
    selector.updateNode(createRMNode("h4", 4, -1, 10));
    selector.computeTask.run();
    nodeIds = selector.selectNodes();
    // No change: a -1 wait time means "unknown", so h4 is not ranked.
    assertEquals("h3:3", nodeIds.get(0).toString());
    assertEquals("h2:2", nodeIds.get(1).toString());
    assertEquals("h1:1", nodeIds.get(2).toString());
    // Now update node 2 to DECOMMISSIONING state
    selector
        .updateNode(createRMNode("h2", 2, 1, 10, NodeState.DECOMMISSIONING));
    selector.computeTask.run();
    nodeIds = selector.selectNodes();
    assertEquals(2, nodeIds.size());
    assertEquals("h3:3", nodeIds.get(0).toString());
    assertEquals("h1:1", nodeIds.get(1).toString());
    // Now update node 2 back to RUNNING state
    selector.updateNode(createRMNode("h2", 2, 1, 10, NodeState.RUNNING));
    selector.computeTask.run();
    nodeIds = selector.selectNodes();
    assertEquals("h2:2", nodeIds.get(0).toString());
    assertEquals("h3:3", nodeIds.get(1).toString());
    assertEquals("h1:1", nodeIds.get(2).toString());
  }
@Test
public void testQueueLengthSort() {
NodeQueueLoadMonitor selector = new NodeQueueLoadMonitor(
NodeQueueLoadMonitor.LoadComparator.QUEUE_LENGTH);
selector.updateNode(createRMNode("h1", 1, -1, 15));
selector.updateNode(createRMNode("h2", 2, -1, 5));
selector.updateNode(createRMNode("h3", 3, -1, 10));
selector.computeTask.run();
List<NodeId> nodeIds = selector.selectNodes();
System.out.println("1-> " + nodeIds);
assertEquals("h2:2", nodeIds.get(0).toString());
assertEquals("h3:3", nodeIds.get(1).toString());
assertEquals("h1:1", nodeIds.get(2).toString());
// Now update node3
selector.updateNode(createRMNode("h3", 3, -1, 2));
selector.computeTask.run();
nodeIds = selector.selectNodes();
System.out.println("2-> "+ nodeIds);
assertEquals("h3:3", nodeIds.get(0).toString());
assertEquals("h2:2", nodeIds.get(1).toString());
assertEquals("h1:1", nodeIds.get(2).toString());
// Now send update with -1 wait time but valid length
selector.updateNode(createRMNode("h4", 4, -1, 20));
selector.computeTask.run();
nodeIds = selector.selectNodes();
System.out.println("3-> "+ nodeIds);
// No change
assertEquals("h3:3", nodeIds.get(0).toString());
assertEquals("h2:2", nodeIds.get(1).toString());
assertEquals("h1:1", nodeIds.get(2).toString());
assertEquals("h4:4", nodeIds.get(3).toString());
// Now update h3 and fill its queue.
selector.updateNode(createRMNode("h3", 3, -1,
DEFAULT_MAX_QUEUE_LENGTH));
selector.computeTask.run();
nodeIds = selector.selectNodes();
System.out.println("4-> "+ nodeIds);
assertEquals(3, nodeIds.size());
assertEquals("h2:2", nodeIds.get(0).toString());
assertEquals("h1:1", nodeIds.get(1).toString());
assertEquals("h4:4", nodeIds.get(2).toString());
// Now update h2 to Decommissioning state
selector.updateNode(createRMNode("h2", 2, -1,
5, NodeState.DECOMMISSIONING));
selector.computeTask.run();
nodeIds = selector.selectNodes();
assertEquals(2, nodeIds.size());
assertEquals("h1:1", nodeIds.get(0).toString());
assertEquals("h4:4", nodeIds.get(1).toString());
// Now update h2 back to Running state
selector.updateNode(createRMNode("h2", 2, -1,
5, NodeState.RUNNING));
selector.computeTask.run();
nodeIds = selector.selectNodes();
assertEquals(3, nodeIds.size());
assertEquals("h2:2", nodeIds.get(0).toString());
assertEquals("h1:1", nodeIds.get(1).toString());
assertEquals("h4:4", nodeIds.get(2).toString());
}
  /**
   * Verifies the QUEUE_LENGTH_THEN_RESOURCES policy: nodes are ranked by
   * queue length first, with available resources as the tie-breaker; a full
   * queue or a DECOMMISSIONING state excludes a node; and a node with zero
   * queue capacity stays usable only while its queue length is zero.
   */
  @Test
  public void testQueueLengthThenResourcesSort() {
    NodeQueueLoadMonitor selector = new NodeQueueLoadMonitor(
        NodeQueueLoadMonitor.LoadComparator.QUEUE_LENGTH_THEN_RESOURCES);
    // Node and queue sizes were selected such that we can determine the
    // order of these nodes in the selectNodes call deterministically
    // h2 -> h1 -> h3 -> h4
    selector.updateNode(createRMNode(
        "h1", 1, -1, 0,
        Resources.multiply(defaultResourceRequested, 3), defaultCapacity));
    selector.updateNode(createRMNode(
        "h2", 2, -1, 0,
        Resources.multiply(defaultResourceRequested, 2), defaultCapacity));
    selector.updateNode(createRMNode(
        "h3", 3, -1, 5,
        Resources.multiply(defaultResourceRequested, 3), defaultCapacity));
    selector.updateNode(createRMNode(
        "h4", 4, -1, 10,
        Resources.multiply(defaultResourceRequested, 2), defaultCapacity));
    selector.computeTask.run();
    List<NodeId> nodeIds = selector.selectNodes();
    assertEquals("h2:2", nodeIds.get(0).toString());
    assertEquals("h1:1", nodeIds.get(1).toString());
    assertEquals("h3:3", nodeIds.get(2).toString());
    assertEquals("h4:4", nodeIds.get(3).toString());
    // Now update node3
    // node3 should now rank after node4 since it has the same queue length
    // but less resources available
    selector.updateNode(createRMNode(
        "h3", 3, -1, 10,
        Resources.multiply(defaultResourceRequested, 3), defaultCapacity));
    selector.computeTask.run();
    nodeIds = selector.selectNodes();
    assertEquals("h2:2", nodeIds.get(0).toString());
    assertEquals("h1:1", nodeIds.get(1).toString());
    assertEquals("h4:4", nodeIds.get(2).toString());
    assertEquals("h3:3", nodeIds.get(3).toString());
    // Now update h3 and fill its queue -- it should no longer be available
    selector.updateNode(createRMNode("h3", 3, -1,
        DEFAULT_MAX_QUEUE_LENGTH));
    selector.computeTask.run();
    nodeIds = selector.selectNodes();
    // h3 is queued up, so we should only have 3 nodes left
    assertEquals(3, nodeIds.size());
    assertEquals("h2:2", nodeIds.get(0).toString());
    assertEquals("h1:1", nodeIds.get(1).toString());
    assertEquals("h4:4", nodeIds.get(2).toString());
    // Now update h2 to Decommissioning state
    selector.updateNode(createRMNode("h2", 2, -1,
        5, NodeState.DECOMMISSIONING));
    selector.computeTask.run();
    nodeIds = selector.selectNodes();
    // h2 is decommissioned, and h3 is full, so we should only have 2 nodes
    assertEquals(2, nodeIds.size());
    assertEquals("h1:1", nodeIds.get(0).toString());
    assertEquals("h4:4", nodeIds.get(1).toString());
    // Now update h2 back to Running state
    selector.updateNode(createRMNode(
        "h2", 2, -1, 0,
        Resources.multiply(defaultResourceRequested, 2), defaultCapacity));
    selector.computeTask.run();
    nodeIds = selector.selectNodes();
    assertEquals(3, nodeIds.size());
    assertEquals("h2:2", nodeIds.get(0).toString());
    assertEquals("h1:1", nodeIds.get(1).toString());
    assertEquals("h4:4", nodeIds.get(2).toString());
    // Now update h2 to have a zero queue capacity.
    // Make sure that here it is still in the pool.
    selector.updateNode(createRMNode(
        "h2", 2, -1, 0, 0,
        Resources.multiply(defaultResourceRequested, 2),
        defaultCapacity));
    selector.computeTask.run();
    nodeIds = selector.selectNodes();
    assertEquals(3, nodeIds.size());
    assertEquals("h2:2", nodeIds.get(0).toString());
    assertEquals("h1:1", nodeIds.get(1).toString());
    assertEquals("h4:4", nodeIds.get(2).toString());
    // Now update h2 to have a positive queue length but a zero queue capacity.
    // Make sure that here it is no longer in the pool.
    // Need to first remove the node, because node capacity is not updated.
    selector.removeNode(createRMNode(
        "h2", 2, -1, 0, 0,
        Resources.multiply(defaultResourceRequested, 2),
        defaultCapacity));
    selector.updateNode(createRMNode(
        "h2", 2, -1, 1, 0,
        Resources.multiply(defaultResourceRequested, 2),
        defaultCapacity));
    selector.computeTask.run();
    nodeIds = selector.selectNodes();
    assertEquals(2, nodeIds.size());
    assertEquals("h1:1", nodeIds.get(0).toString());
    assertEquals("h4:4", nodeIds.get(1).toString());
  }
  /**
   * Tests that selecting a node under QUEUE_LENGTH_THEN_RESOURCES decrements
   * the amount of available resources (or bumps the queue length) on the
   * internal {@link ClusterNode} representation, so that back-to-back
   * selections see the updated state.
   */
  @Test
  public void testQueueLengthThenResourcesDecrementsAvailable() {
    NodeQueueLoadMonitor selector = new NodeQueueLoadMonitor(
        NodeQueueLoadMonitor.LoadComparator.QUEUE_LENGTH_THEN_RESOURCES);
    RMNode node = createRMNode("h1", 1, -1, 0);
    selector.addNode(null, node);
    selector.updateNode(node);
    selector.updateSortedNodes();
    // Nothing allocated yet on the freshly added node.
    ClusterNode clusterNode = selector.getClusterNodes().get(node.getNodeID());
    assertEquals(Resources.none(),
        clusterNode.getAllocatedResource());
    // Has enough resources: selection succeeds and the request is recorded
    // as allocated on the ClusterNode.
    RMNode selectedNode = selector.selectAnyNode(
        Collections.emptySet(), defaultResourceRequested);
    assertNotNull(selectedNode);
    assertEquals(node.getNodeID(), selectedNode.getNodeID());
    clusterNode = selector.getClusterNodes().get(node.getNodeID());
    assertEquals(defaultResourceRequested,
        clusterNode.getAllocatedResource());
    // Does not have enough resources, but can queue: queue length goes to 1.
    selectedNode = selector.selectAnyNode(
        Collections.emptySet(), defaultCapacity);
    assertNotNull(selectedNode);
    assertEquals(node.getNodeID(), selectedNode.getNodeID());
    clusterNode = selector.getClusterNodes().get(node.getNodeID());
    assertEquals(1, clusterNode.getQueueLength());
    // Does not have enough resources and cannot queue: no node returned.
    selectedNode = selector.selectAnyNode(
        Collections.emptySet(),
        Resources.add(defaultResourceRequested, defaultCapacity));
    assertNull(selectedNode);
  }
  /**
   * Verifies that changing a node's total capability re-ranks it under
   * QUEUE_LENGTH_THEN_RESOURCES: shrinking the capability shrinks the
   * available resources and pushes the node to the back of the list.
   */
  @Test
  public void testQueueLengthThenResourcesCapabilityChange() {
    NodeQueueLoadMonitor selector = new NodeQueueLoadMonitor(
        NodeQueueLoadMonitor.LoadComparator.QUEUE_LENGTH_THEN_RESOURCES);
    // Node sizes were selected such that we can determine the
    // order of these nodes in the selectNodes call deterministically
    // h1 -> h2 -> h3 -> h4
    selector.updateNode(createRMNode(
        "h1", 1, -1, 0,
        Resources.multiply(defaultResourceRequested, 1), defaultCapacity));
    selector.updateNode(createRMNode(
        "h2", 2, -1, 0,
        Resources.multiply(defaultResourceRequested, 2), defaultCapacity));
    selector.updateNode(createRMNode(
        "h3", 3, -1, 0,
        Resources.multiply(defaultResourceRequested, 3), defaultCapacity));
    selector.updateNode(createRMNode(
        "h4", 4, -1, 0,
        Resources.multiply(defaultResourceRequested, 4), defaultCapacity));
    selector.computeTask.run();
    List<NodeId> nodeIds = selector.selectNodes();
    assertEquals("h1:1", nodeIds.get(0).toString());
    assertEquals("h2:2", nodeIds.get(1).toString());
    assertEquals("h3:3", nodeIds.get(2).toString());
    assertEquals("h4:4", nodeIds.get(3).toString());
    // Now update node1 to have only defaultResourceRequested available
    // by changing its capability to 2x defaultResourceRequested
    // node1 should now rank last
    selector.updateNode(createRMNode(
        "h1", 1, -1, 0,
        Resources.multiply(defaultResourceRequested, 1),
        Resources.multiply(defaultResourceRequested, 2)));
    selector.computeTask.run();
    nodeIds = selector.selectNodes();
    assertEquals("h2:2", nodeIds.get(0).toString());
    assertEquals("h3:3", nodeIds.get(1).toString());
    assertEquals("h4:4", nodeIds.get(2).toString());
    assertEquals("h1:1", nodeIds.get(3).toString());
    // Now update node2 to have no resources available
    // by changing its capability to 1x defaultResourceRequested
    // node2 should now rank last
    selector.updateNode(createRMNode(
        "h2", 2, -1, 0,
        Resources.multiply(defaultResourceRequested, 1),
        Resources.multiply(defaultResourceRequested, 1)));
    selector.computeTask.run();
    nodeIds = selector.selectNodes();
    assertEquals("h3:3", nodeIds.get(0).toString());
    assertEquals("h4:4", nodeIds.get(1).toString());
    assertEquals("h1:1", nodeIds.get(2).toString());
    assertEquals("h2:2", nodeIds.get(3).toString());
  }
  /**
   * Verifies the container queuing limit produced by the threshold
   * calculator: before the first compute pass it reports the configured
   * value, afterwards it tracks the reported queue lengths, clamped to the
   * configured bounds (6 and 100 below).
   */
  @Test
  public void testContainerQueuingLimit() {
    NodeQueueLoadMonitor selector = new NodeQueueLoadMonitor(
        NodeQueueLoadMonitor.LoadComparator.QUEUE_LENGTH);
    selector.updateNode(createRMNode("h1", 1, -1, 15));
    selector.updateNode(createRMNode("h2", 2, -1, 5));
    selector.updateNode(createRMNode("h3", 3, -1, 10));
    // Test Mean Calculation
    // NOTE(review): arguments look like (sigma, min, max) — confirm against
    // NodeQueueLoadMonitor#initThresholdCalculator.
    selector.initThresholdCalculator(0, 6, 100);
    QueueLimitCalculator calculator = selector.getThresholdCalculator();
    ContainerQueuingLimit containerQueuingLimit = calculator
        .createContainerQueuingLimit();
    // Before computeTask runs, the limit is still the configured value (6).
    assertEquals(6, containerQueuingLimit.getMaxQueueLength());
    assertEquals(-1, containerQueuingLimit.getMaxQueueWaitTimeInMs());
    selector.computeTask.run();
    containerQueuingLimit = calculator.createContainerQueuingLimit();
    // Mean of the reported queue lengths (15, 5, 10) is 10.
    assertEquals(10, containerQueuingLimit.getMaxQueueLength());
    assertEquals(-1, containerQueuingLimit.getMaxQueueWaitTimeInMs());
    // Test Limits do not exceed specified max
    selector.updateNode(createRMNode("h1", 1, -1, 110));
    selector.updateNode(createRMNode("h2", 2, -1, 120));
    selector.updateNode(createRMNode("h3", 3, -1, 130));
    selector.updateNode(createRMNode("h4", 4, -1, 140));
    selector.updateNode(createRMNode("h5", 5, -1, 150));
    selector.updateNode(createRMNode("h6", 6, -1, 160));
    selector.computeTask.run();
    containerQueuingLimit = calculator.createContainerQueuingLimit();
    assertEquals(100, containerQueuingLimit.getMaxQueueLength());
    // Test Limits do not go below specified min
    selector.updateNode(createRMNode("h1", 1, -1, 1));
    selector.updateNode(createRMNode("h2", 2, -1, 2));
    selector.updateNode(createRMNode("h3", 3, -1, 3));
    selector.updateNode(createRMNode("h4", 4, -1, 4));
    selector.updateNode(createRMNode("h5", 5, -1, 5));
    selector.updateNode(createRMNode("h6", 6, -1, 6));
    selector.computeTask.run();
    containerQueuingLimit = calculator.createContainerQueuingLimit();
    assertEquals(6, containerQueuingLimit.getMaxQueueLength());
  }
/**
* Tests selection of local node from NodeQueueLoadMonitor. This test covers
* selection of node based on queue limit and blacklisted nodes.
*/
@Test
public void testSelectLocalNode() {
NodeQueueLoadMonitor selector = new NodeQueueLoadMonitor(
NodeQueueLoadMonitor.LoadComparator.QUEUE_LENGTH);
RMNode h1 = createRMNode("h1", 1, -1, 2, 5);
RMNode h2 = createRMNode("h2", 2, -1, 5, 5);
RMNode h3 = createRMNode("h3", 3, -1, 4, 5);
selector.addNode(null, h1);
selector.addNode(null, h2);
selector.addNode(null, h3);
selector.updateNode(h1);
selector.updateNode(h2);
selector.updateNode(h3);
// basic test for selecting node which has queue length less
// than queue capacity.
Set<String> blacklist = new HashSet<>();
RMNode node = selector.selectLocalNode(
"h1", blacklist, defaultResourceRequested);
assertEquals("h1", node.getHostName());
// if node has been added to blacklist
blacklist.add("h1");
node = selector.selectLocalNode(
"h1", blacklist, defaultResourceRequested);
assertNull(node);
node = selector.selectLocalNode(
"h2", blacklist, defaultResourceRequested);
assertNull(node);
node = selector.selectLocalNode(
"h3", blacklist, defaultResourceRequested);
assertEquals("h3", node.getHostName());
}
  /**
   * Tests selection of a rack-local node from NodeQueueLoadMonitor. This test
   * covers selection of a node based on queue limit and blacklisted nodes.
   */
  @Test
  public void testSelectRackLocalNode() {
    NodeQueueLoadMonitor selector = new NodeQueueLoadMonitor(
        NodeQueueLoadMonitor.LoadComparator.QUEUE_LENGTH);
    // rack1 hosts only h1; rack2 hosts h2 (queue already full) and h3.
    RMNode h1 = createRMNode("h1", 1, "rack1", -1, 2, 5);
    RMNode h2 = createRMNode("h2", 2, "rack2", -1, 5, 5);
    RMNode h3 = createRMNode("h3", 3, "rack2", -1, 4, 5);
    selector.addNode(null, h1);
    selector.addNode(null, h2);
    selector.addNode(null, h3);
    selector.updateNode(h1);
    selector.updateNode(h2);
    selector.updateNode(h3);
    // basic test for selecting node which has queue length less
    // than queue capacity.
    Set<String> blacklist = new HashSet<>();
    RMNode node = selector.selectRackLocalNode(
        "rack1", blacklist, defaultResourceRequested);
    assertEquals("h1", node.getHostName());
    // if node has been added to blacklist, rack1 has no candidates left
    blacklist.add("h1");
    node = selector.selectRackLocalNode(
        "rack1", blacklist, defaultResourceRequested);
    assertNull(node);
    // rack2 resolves to h3 because h2's queue is full (5 of 5)
    node = selector.selectRackLocalNode(
        "rack2", blacklist, defaultResourceRequested);
    assertEquals("h3", node.getHostName());
    blacklist.add("h3");
    node = selector.selectRackLocalNode(
        "rack2", blacklist, defaultResourceRequested);
    assertNull(node);
  }
  /**
   * Tests selection of any node from NodeQueueLoadMonitor. This test
   * covers selection of node based on queue limit and blacklisted nodes.
   */
  @Test
  public void testSelectAnyNode() {
    NodeQueueLoadMonitor selector = new NodeQueueLoadMonitor(
        NodeQueueLoadMonitor.LoadComparator.QUEUE_LENGTH);
    RMNode h1 = createRMNode("h1", 1, "rack1", -1, 2, 5);
    RMNode h2 = createRMNode("h2", 2, "rack2", -1, 5, 5);
    RMNode h3 = createRMNode("h3", 3, "rack2", -1, 4, 10);
    selector.addNode(null, h1);
    selector.addNode(null, h2);
    selector.addNode(null, h3);
    selector.updateNode(h1);
    selector.updateNode(h2);
    selector.updateNode(h3);
    selector.computeTask.run();
    // h2's queue is already full (5 of 5), so only h1 and h3 get sorted.
    assertEquals(2, selector.getSortedNodes().size());
    // basic test for selecting node which has queue length
    // less than queue capacity.
    Set<String> blacklist = new HashSet<>();
    RMNode node = selector.selectAnyNode(blacklist, defaultResourceRequested);
    assertTrue(node.getHostName().equals("h1") ||
        node.getHostName().equals("h3"));
    // if node has been added to blacklist, only h3 remains eligible
    blacklist.add("h1");
    node = selector.selectAnyNode(blacklist, defaultResourceRequested);
    assertEquals("h3", node.getHostName());
    blacklist.add("h3");
    node = selector.selectAnyNode(blacklist, defaultResourceRequested);
    assertNull(node);
  }
@Test
public void testQueueLengthThenResourcesComparator() {
NodeQueueLoadMonitor.LoadComparator comparator =
NodeQueueLoadMonitor.LoadComparator.QUEUE_LENGTH_THEN_RESOURCES;
NodeId n1 = new FakeNodeId("n1", 5000);
NodeId n2 = new FakeNodeId("n2", 5000);
// Case 1: larger available cores should be ranked first
{
ClusterNode.Properties cn1Props =
ClusterNode.Properties.newInstance()
.setAllocatedResource(newResourceInstance(5, 5))
.setCapability(newResourceInstance(10, 10, 1000))
.setQueueLength(10);
ClusterNode cn1 = new ClusterNode(n1);
cn1.setProperties(cn1Props);
ClusterNode.Properties cn2Props =
ClusterNode.Properties.newInstance()
.setAllocatedResource(newResourceInstance(6, 6))
.setCapability(newResourceInstance(10, 10, 1000))
.setQueueLength(10);
ClusterNode cn2 = new ClusterNode(n2);
cn2.setProperties(cn2Props);
comparator.setClusterResource(
Resources.add(cn1.getCapability(), cn2.getCapability()));
assertTrue(comparator.compare(cn1, cn2) < 0);
}
// Case 2: Shorter queue should be ranked first before comparing resources
{
ClusterNode.Properties cn1Props =
ClusterNode.Properties.newInstance()
.setAllocatedResource(newResourceInstance(5, 5))
.setCapability(newResourceInstance(10, 10, 1000))
.setQueueLength(5);
ClusterNode cn1 = new ClusterNode(n1);
cn1.setProperties(cn1Props);
ClusterNode.Properties cn2Props =
ClusterNode.Properties.newInstance()
.setAllocatedResource(newResourceInstance(3, 3))
.setCapability(newResourceInstance(10, 10, 1000))
.setQueueLength(10);
ClusterNode cn2 = new ClusterNode(n2);
cn2.setProperties(cn2Props);
comparator.setClusterResource(
Resources.add(cn1.getCapability(), cn2.getCapability()));
assertTrue(comparator.compare(cn1, cn2) < 0);
}
// Case 3: No capability vs with capability,
// with capability should come first
{
ClusterNode.Properties cn1Props =
ClusterNode.Properties.newInstance()
.setAllocatedResource(Resources.none())
.setCapability(newResourceInstance(1, 1, 1000))
.setQueueLength(5);
ClusterNode cn1 = new ClusterNode(n1);
cn1.setProperties(cn1Props);
ClusterNode.Properties cn2Props =
ClusterNode.Properties.newInstance()
.setAllocatedResource(Resources.none())
.setCapability(Resources.none())
.setQueueLength(5);
ClusterNode cn2 = new ClusterNode(n2);
cn2.setProperties(cn2Props);
comparator.setClusterResource(
Resources.add(cn1.getCapability(), cn2.getCapability()));
assertTrue(comparator.compare(cn1, cn2) < 0);
}
// Case 4: Compare same values
{
ClusterNode.Properties cn1Props =
ClusterNode.Properties.newInstance()
.setAllocatedResource(newResourceInstance(5, 5))
.setCapability(newResourceInstance(10, 10, 1000))
.setQueueLength(10);
ClusterNode cn1 = new ClusterNode(n1);
cn1.setProperties(cn1Props);
ClusterNode.Properties cn2Props =
ClusterNode.Properties.newInstance()
.setAllocatedResource(newResourceInstance(5, 5))
.setCapability(newResourceInstance(10, 10, 1000))
.setQueueLength(10);
ClusterNode cn2 = new ClusterNode(n2);
cn2.setProperties(cn2Props);
comparator.setClusterResource(
Resources.add(cn1.getCapability(), cn2.getCapability()));
assertEquals(0, comparator.compare(cn1, cn2));
}
// Case 5: If ratio is the same, compare raw values
// by VCores first, then memory
{
ClusterNode.Properties cn1Props =
ClusterNode.Properties.newInstance()
.setAllocatedResource(newResourceInstance(6, 5))
.setCapability(newResourceInstance(10, 10, 1000))
.setQueueLength(10);
ClusterNode cn1 = new ClusterNode(n1);
cn1.setProperties(cn1Props);
ClusterNode.Properties cn2Props =
ClusterNode.Properties.newInstance()
.setAllocatedResource(newResourceInstance(5, 6))
.setCapability(newResourceInstance(10, 10, 1000))
.setQueueLength(10);
ClusterNode cn2 = new ClusterNode(n2);
cn2.setProperties(cn2Props);
comparator.setClusterResource(
Resources.add(cn1.getCapability(), cn2.getCapability()));
// Both are 60% allocated, but CN1 has 5 avail VCores, CN2 only has 4
assertTrue(comparator.compare(cn1, cn2) < 0);
}
// Case 6: by VCores absolute value
{
ClusterNode.Properties cn1Props =
ClusterNode.Properties.newInstance()
.setAllocatedResource(newResourceInstance(5, 5))
.setCapability(newResourceInstance(10, 10, 1000))
.setQueueLength(10);
ClusterNode cn1 = new ClusterNode(n1);
cn1.setProperties(cn1Props);
ClusterNode.Properties cn2Props =
ClusterNode.Properties.newInstance()
.setAllocatedResource(newResourceInstance(5, 6))
.setCapability(newResourceInstance(10, 12, 1000))
.setQueueLength(10);
ClusterNode cn2 = new ClusterNode(n2);
cn2.setProperties(cn2Props);
comparator.setClusterResource(
Resources.add(cn1.getCapability(), cn2.getCapability()));
assertTrue(comparator.compare(cn2, cn1) < 0);
}
// Case 7: by memory absolute value
{
ClusterNode.Properties cn1Props =
ClusterNode.Properties.newInstance()
.setAllocatedResource(newResourceInstance(5, 5))
.setCapability(newResourceInstance(10, 10, 1000))
.setQueueLength(10);
ClusterNode cn1 = new ClusterNode(n1);
cn1.setProperties(cn1Props);
ClusterNode.Properties cn2Props =
ClusterNode.Properties.newInstance()
.setAllocatedResource(newResourceInstance(6, 5))
.setCapability(newResourceInstance(12, 10, 1000))
.setQueueLength(10);
ClusterNode cn2 = new ClusterNode(n2);
cn2.setProperties(cn2Props);
comparator.setClusterResource(
Resources.add(cn1.getCapability(), cn2.getCapability()));
assertTrue(comparator.compare(cn2, cn1) < 0);
}
// Case 8: Memory should be more constraining in the overall cluster,
// so rank the node with less allocated memory first
{
ClusterNode.Properties cn1Props =
ClusterNode.Properties.newInstance()
.setAllocatedResource(newResourceInstance(5, 11))
.setCapability(newResourceInstance(10, 100, 1000))
.setQueueLength(10);
ClusterNode cn1 = new ClusterNode(n1);
cn1.setProperties(cn1Props);
ClusterNode.Properties cn2Props =
ClusterNode.Properties.newInstance()
.setAllocatedResource(newResourceInstance(6, 10))
.setCapability(newResourceInstance(10, 100, 1000))
.setQueueLength(10);
ClusterNode cn2 = new ClusterNode(n2);
cn2.setProperties(cn2Props);
comparator.setClusterResource(
Resources.add(cn1.getCapability(), cn2.getCapability()));
assertTrue(comparator.compare(cn1, cn2) < 0);
}
}
  /** Creates a RUNNING node in the default rack with the default max queue capacity. */
  private RMNode createRMNode(String host, int port,
      int waitTime, int queueLength) {
    return createRMNode(host, port, waitTime, queueLength,
        DEFAULT_MAX_QUEUE_LENGTH);
  }
  /** Creates a node in the given state with the default max queue capacity. */
  private RMNode createRMNode(String host, int port,
      int waitTime, int queueLength, NodeState state) {
    return createRMNode(host, port, "default", waitTime, queueLength,
        DEFAULT_MAX_QUEUE_LENGTH, state);
  }
  /** Creates a RUNNING node in the default rack with an explicit queue capacity. */
  private RMNode createRMNode(String host, int port,
      int waitTime, int queueLength, int queueCapacity) {
    return createRMNode(host, port, "default", waitTime, queueLength,
        queueCapacity, NodeState.RUNNING);
  }
  /** Creates a RUNNING node on an explicit rack with an explicit queue capacity. */
  private RMNode createRMNode(String host, int port, String rack,
      int waitTime, int queueLength, int queueCapacity) {
    return createRMNode(host, port, rack, waitTime, queueLength, queueCapacity,
        NodeState.RUNNING);
  }
  /** Creates a node with no allocated resources and the default node capacity. */
  private RMNode createRMNode(String host, int port, String rack,
      int waitTime, int queueLength, int queueCapacity, NodeState state) {
    return createRMNode(host, port, rack, waitTime, queueLength, queueCapacity,
        state, Resources.none(), defaultCapacity);
  }
  /** Creates a RUNNING node with explicit resources and the default max queue capacity. */
  private RMNode createRMNode(
      String host, int port, int waitTime, int queueLength,
      Resource allocatedResource, Resource nodeResource) {
    return createRMNode(host, port, waitTime, queueLength,
        DEFAULT_MAX_QUEUE_LENGTH, allocatedResource, nodeResource);
  }
  /** Creates a RUNNING node in the default rack with explicit resources and queue capacity. */
  private RMNode createRMNode(
      String host, int port, int waitTime, int queueLength, int queueCapacity,
      Resource allocatedResource, Resource nodeResource) {
    return createRMNode(host, port, "default", waitTime, queueLength,
        queueCapacity, NodeState.RUNNING, allocatedResource, nodeResource);
  }
@SuppressWarnings("parameternumber")
private RMNode createRMNode(String host, int port, String rack,
int waitTime, int queueLength, int queueCapacity, NodeState state,
Resource allocatedResource, Resource nodeResource) {
RMNode node1 = Mockito.mock(RMNode.class);
NodeId nID1 = new FakeNodeId(host, port);
Mockito.when(node1.getHostName()).thenReturn(host);
Mockito.when(node1.getRackName()).thenReturn(rack);
Mockito.when(node1.getNode()).thenReturn(new NodeBase("/" + host));
Mockito.when(node1.getNodeID()).thenReturn(nID1);
Mockito.when(node1.getState()).thenReturn(state);
Mockito.when(node1.getTotalCapability()).thenReturn(nodeResource);
Mockito.when(node1.getNodeUtilization()).thenReturn(
ResourceUtilization.newInstance(0, 0, 0));
Mockito.when(node1.getAllocatedContainerResource()).thenReturn(
allocatedResource);
OpportunisticContainersStatus status1 =
Mockito.mock(OpportunisticContainersStatus.class);
Mockito.when(status1.getEstimatedQueueWaitTime())
.thenReturn(waitTime);
Mockito.when(status1.getWaitQueueLength())
.thenReturn(queueLength);
Mockito.when(status1.getOpportQueueCapacity())
.thenReturn(queueCapacity);
Mockito.when(node1.getOpportunisticContainersStatus()).thenReturn(status1);
return node1;
}
}
|
googleapis/google-cloud-java | 34,999 | java-datalabeling/proto-google-cloud-datalabeling-v1beta1/src/main/java/com/google/cloud/datalabeling/v1beta1/TextClassificationConfig.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/datalabeling/v1beta1/human_annotation_config.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.datalabeling.v1beta1;
/**
*
*
* <pre>
* Config for text classification human labeling task.
* </pre>
*
* Protobuf type {@code google.cloud.datalabeling.v1beta1.TextClassificationConfig}
*/
public final class TextClassificationConfig extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.datalabeling.v1beta1.TextClassificationConfig)
TextClassificationConfigOrBuilder {
private static final long serialVersionUID = 0L;
// Use TextClassificationConfig.newBuilder() to construct.
  // Builder-based constructor, invoked via Builder#build().
  private TextClassificationConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor for the default instance; string field defaults to "".
  private TextClassificationConfig() {
    annotationSpecSet_ = "";
  }
  // Creates a fresh, empty instance.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new TextClassificationConfig();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.datalabeling.v1beta1.HumanAnnotationConfigOuterClass
        .internal_static_google_cloud_datalabeling_v1beta1_TextClassificationConfig_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.datalabeling.v1beta1.HumanAnnotationConfigOuterClass
        .internal_static_google_cloud_datalabeling_v1beta1_TextClassificationConfig_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.datalabeling.v1beta1.TextClassificationConfig.class,
            com.google.cloud.datalabeling.v1beta1.TextClassificationConfig.Builder.class);
  }
  // Presence bits for optional fields; bit 0 tracks sentiment_config (see
  // hasSentimentConfig()).
  private int bitField0_;
  public static final int ALLOW_MULTI_LABEL_FIELD_NUMBER = 1;
  private boolean allowMultiLabel_ = false;
/**
*
*
* <pre>
* Optional. If allow_multi_label is true, contributors are able to choose
* multiple labels for one text segment.
* </pre>
*
* <code>bool allow_multi_label = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The allowMultiLabel.
*/
  // Plain accessor for field 1 (allow_multi_label).
  @java.lang.Override
  public boolean getAllowMultiLabel() {
    return allowMultiLabel_;
  }
  public static final int ANNOTATION_SPEC_SET_FIELD_NUMBER = 2;
  // Holds either a String or a ByteString; decoded lazily by
  // getAnnotationSpecSet() below.
  @SuppressWarnings("serial")
  private volatile java.lang.Object annotationSpecSet_ = "";
/**
*
*
* <pre>
* Required. Annotation spec set resource name.
* </pre>
*
* <code>string annotation_spec_set = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The annotationSpecSet.
*/
  @java.lang.Override
  public java.lang.String getAnnotationSpecSet() {
    java.lang.Object ref = annotationSpecSet_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so later calls skip the UTF-8 decode.
      annotationSpecSet_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. Annotation spec set resource name.
   * </pre>
   *
   * <code>string annotation_spec_set = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for annotationSpecSet.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getAnnotationSpecSetBytes() {
    java.lang.Object ref = annotationSpecSet_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the encoded ByteString symmetrically to the String case above.
      annotationSpecSet_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int SENTIMENT_CONFIG_FIELD_NUMBER = 3;
  private com.google.cloud.datalabeling.v1beta1.SentimentConfig sentimentConfig_;
/**
*
*
* <pre>
* Optional. Configs for sentiment selection.
* </pre>
*
* <code>
* .google.cloud.datalabeling.v1beta1.SentimentConfig sentiment_config = 3 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the sentimentConfig field is set.
*/
  @java.lang.Override
  public boolean hasSentimentConfig() {
    // Bit 0 of bitField0_ records presence of the optional sentiment_config.
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Optional. Configs for sentiment selection.
   * </pre>
   *
   * <code>
   * .google.cloud.datalabeling.v1beta1.SentimentConfig sentiment_config = 3 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The sentimentConfig.
   */
  @java.lang.Override
  public com.google.cloud.datalabeling.v1beta1.SentimentConfig getSentimentConfig() {
    // Never returns null: falls back to the default instance when unset.
    return sentimentConfig_ == null
        ? com.google.cloud.datalabeling.v1beta1.SentimentConfig.getDefaultInstance()
        : sentimentConfig_;
  }
  /**
   *
   *
   * <pre>
   * Optional. Configs for sentiment selection.
   * </pre>
   *
   * <code>
   * .google.cloud.datalabeling.v1beta1.SentimentConfig sentiment_config = 3 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.datalabeling.v1beta1.SentimentConfigOrBuilder
      getSentimentConfigOrBuilder() {
    return sentimentConfig_ == null
        ? com.google.cloud.datalabeling.v1beta1.SentimentConfig.getDefaultInstance()
        : sentimentConfig_;
  }
  // Memoized initialization state: -1 = not computed, 1 = true, 0 = false.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // Nothing is checked before caching success: this message has no
    // proto-required fields to validate.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Fields are written in field-number order; default values are skipped.
    if (allowMultiLabel_ != false) {
      output.writeBool(1, allowMultiLabel_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(annotationSpecSet_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, annotationSpecSet_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(3, getSentimentConfig());
    }
    // Preserve any fields this binary didn't know about at parse time.
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize caches the computed size; -1 means "not yet computed".
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    // Mirrors writeTo(): only non-default / present fields contribute.
    if (allowMultiLabel_ != false) {
      size += com.google.protobuf.CodedOutputStream.computeBoolSize(1, allowMultiLabel_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(annotationSpecSet_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, annotationSpecSet_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getSentimentConfig());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.datalabeling.v1beta1.TextClassificationConfig)) {
      return super.equals(obj);
    }
    com.google.cloud.datalabeling.v1beta1.TextClassificationConfig other =
        (com.google.cloud.datalabeling.v1beta1.TextClassificationConfig) obj;
    // Field-by-field comparison; the optional message is compared only when
    // present on both sides, and unknown fields must match too.
    if (getAllowMultiLabel() != other.getAllowMultiLabel()) return false;
    if (!getAnnotationSpecSet().equals(other.getAnnotationSpecSet())) return false;
    if (hasSentimentConfig() != other.hasSentimentConfig()) return false;
    if (hasSentimentConfig()) {
      if (!getSentimentConfig().equals(other.getSentimentConfig())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Hash is memoized; 0 doubles as the "not yet computed" sentinel.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // Each field mixes in its field number followed by its value, keeping
    // hashCode consistent with equals() above.
    hash = (37 * hash) + ALLOW_MULTI_LABEL_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getAllowMultiLabel());
    hash = (37 * hash) + ANNOTATION_SPEC_SET_FIELD_NUMBER;
    hash = (53 * hash) + getAnnotationSpecSet().hashCode();
    if (hasSentimentConfig()) {
      hash = (37 * hash) + SENTIMENT_CONFIG_FIELD_NUMBER;
      hash = (53 * hash) + getSentimentConfig().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
// Standard generated parse entry points. Each overload accepts a different input
// representation (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream) and
// delegates to PARSER; the *WithIOException helpers adapt stream errors to IOException.
public static com.google.cloud.datalabeling.v1beta1.TextClassificationConfig parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datalabeling.v1beta1.TextClassificationConfig parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datalabeling.v1beta1.TextClassificationConfig parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datalabeling.v1beta1.TextClassificationConfig parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datalabeling.v1beta1.TextClassificationConfig parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datalabeling.v1beta1.TextClassificationConfig parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datalabeling.v1beta1.TextClassificationConfig parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datalabeling.v1beta1.TextClassificationConfig parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix first (for streams of messages).
public static com.google.cloud.datalabeling.v1beta1.TextClassificationConfig parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.datalabeling.v1beta1.TextClassificationConfig parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datalabeling.v1beta1.TextClassificationConfig parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datalabeling.v1beta1.TextClassificationConfig parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Builder factory methods: fresh builders come from the default instance's toBuilder(),
// so a new Builder is only allocated when state must diverge from the default.
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.datalabeling.v1beta1.TextClassificationConfig prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// The default instance yields an empty builder; any other instance seeds the builder
// with its current field values via mergeFrom.
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Config for text classification human labeling task.
* </pre>
*
* Protobuf type {@code google.cloud.datalabeling.v1beta1.TextClassificationConfig}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.datalabeling.v1beta1.TextClassificationConfig)
com.google.cloud.datalabeling.v1beta1.TextClassificationConfigOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datalabeling.v1beta1.HumanAnnotationConfigOuterClass
.internal_static_google_cloud_datalabeling_v1beta1_TextClassificationConfig_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
// Backs the reflection API (getField/setField) with the generated accessor table.
return com.google.cloud.datalabeling.v1beta1.HumanAnnotationConfigOuterClass
.internal_static_google_cloud_datalabeling_v1beta1_TextClassificationConfig_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datalabeling.v1beta1.TextClassificationConfig.class,
com.google.cloud.datalabeling.v1beta1.TextClassificationConfig.Builder.class);
}
// Construct using com.google.cloud.datalabeling.v1beta1.TextClassificationConfig.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
// Eagerly creates nested-message field builders when the runtime requests it
// (needed for nested-builder change propagation).
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getSentimentConfigFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
// Resets every field and presence bit back to its proto3 default.
super.clear();
bitField0_ = 0;
allowMultiLabel_ = false;
annotationSpecSet_ = "";
sentimentConfig_ = null;
if (sentimentConfigBuilder_ != null) {
sentimentConfigBuilder_.dispose();
sentimentConfigBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.datalabeling.v1beta1.HumanAnnotationConfigOuterClass
.internal_static_google_cloud_datalabeling_v1beta1_TextClassificationConfig_descriptor;
}
@java.lang.Override
public com.google.cloud.datalabeling.v1beta1.TextClassificationConfig
getDefaultInstanceForType() {
return com.google.cloud.datalabeling.v1beta1.TextClassificationConfig.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.datalabeling.v1beta1.TextClassificationConfig build() {
// Unlike buildPartial(), build() rejects messages missing required fields.
com.google.cloud.datalabeling.v1beta1.TextClassificationConfig result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.datalabeling.v1beta1.TextClassificationConfig buildPartial() {
com.google.cloud.datalabeling.v1beta1.TextClassificationConfig result =
new com.google.cloud.datalabeling.v1beta1.TextClassificationConfig(this);
// Only copy fields when at least one presence bit is set (all-default fast path).
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.datalabeling.v1beta1.TextClassificationConfig result) {
// Copies each field whose builder-side presence bit is set; note the message's own
// bitField0_ uses a different layout (only sentiment_config tracks presence there).
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.allowMultiLabel_ = allowMultiLabel_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.annotationSpecSet_ = annotationSpecSet_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000004) != 0)) {
result.sentimentConfig_ =
sentimentConfigBuilder_ == null ? sentimentConfig_ : sentimentConfigBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
// Dispatches to the typed merge when possible; otherwise the reflective base merge.
if (other instanceof com.google.cloud.datalabeling.v1beta1.TextClassificationConfig) {
return mergeFrom((com.google.cloud.datalabeling.v1beta1.TextClassificationConfig) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.datalabeling.v1beta1.TextClassificationConfig other) {
// Proto3 merge semantics: only non-default fields from `other` overwrite this builder.
if (other
== com.google.cloud.datalabeling.v1beta1.TextClassificationConfig.getDefaultInstance())
return this;
if (other.getAllowMultiLabel() != false) {
setAllowMultiLabel(other.getAllowMultiLabel());
}
if (!other.getAnnotationSpecSet().isEmpty()) {
annotationSpecSet_ = other.annotationSpecSet_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.hasSentimentConfig()) {
mergeSentimentConfig(other.getSentimentConfig());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
// No proto2 required fields, so any state is considered initialized.
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
// Wire-format parse loop: tags encode (field_number << 3) | wire_type,
// so 8 = field 1 varint, 18 = field 2 length-delimited, 26 = field 3 message.
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8:
{
allowMultiLabel_ = input.readBool();
bitField0_ |= 0x00000001;
break;
} // case 8
case 18:
{
annotationSpecSet_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
input.readMessage(getSentimentConfigFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
// Unrecognized tags are preserved as unknown fields for round-tripping.
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
// Presence bits: 0x1 = allow_multi_label, 0x2 = annotation_spec_set, 0x4 = sentiment_config.
private int bitField0_;
private boolean allowMultiLabel_;
/**
*
*
* <pre>
* Optional. If allow_multi_label is true, contributors are able to choose
* multiple labels for one text segment.
* </pre>
*
* <code>bool allow_multi_label = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The allowMultiLabel.
*/
@java.lang.Override
public boolean getAllowMultiLabel() {
return allowMultiLabel_;
}
/**
*
*
* <pre>
* Optional. If allow_multi_label is true, contributors are able to choose
* multiple labels for one text segment.
* </pre>
*
* <code>bool allow_multi_label = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The allowMultiLabel to set.
* @return This builder for chaining.
*/
public Builder setAllowMultiLabel(boolean value) {
allowMultiLabel_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. If allow_multi_label is true, contributors are able to choose
* multiple labels for one text segment.
* </pre>
*
* <code>bool allow_multi_label = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearAllowMultiLabel() {
bitField0_ = (bitField0_ & ~0x00000001);
allowMultiLabel_ = false;
onChanged();
return this;
}
// Stored as Object: either a String or a ByteString, lazily converted on access.
private java.lang.Object annotationSpecSet_ = "";
/**
*
*
* <pre>
* Required. Annotation spec set resource name.
* </pre>
*
* <code>string annotation_spec_set = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The annotationSpecSet.
*/
public java.lang.String getAnnotationSpecSet() {
java.lang.Object ref = annotationSpecSet_;
if (!(ref instanceof java.lang.String)) {
// Cache the decoded String so subsequent reads skip the UTF-8 conversion.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
annotationSpecSet_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Annotation spec set resource name.
* </pre>
*
* <code>string annotation_spec_set = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for annotationSpecSet.
*/
public com.google.protobuf.ByteString getAnnotationSpecSetBytes() {
java.lang.Object ref = annotationSpecSet_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
annotationSpecSet_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Annotation spec set resource name.
* </pre>
*
* <code>string annotation_spec_set = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The annotationSpecSet to set.
* @return This builder for chaining.
*/
public Builder setAnnotationSpecSet(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
annotationSpecSet_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Annotation spec set resource name.
* </pre>
*
* <code>string annotation_spec_set = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearAnnotationSpecSet() {
annotationSpecSet_ = getDefaultInstance().getAnnotationSpecSet();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Annotation spec set resource name.
* </pre>
*
* <code>string annotation_spec_set = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for annotationSpecSet to set.
* @return This builder for chaining.
*/
public Builder setAnnotationSpecSetBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
annotationSpecSet_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
// sentiment_config is accessed either directly (sentimentConfig_) or through the
// nested builder (sentimentConfigBuilder_); exactly one path is live at a time.
private com.google.cloud.datalabeling.v1beta1.SentimentConfig sentimentConfig_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.datalabeling.v1beta1.SentimentConfig,
com.google.cloud.datalabeling.v1beta1.SentimentConfig.Builder,
com.google.cloud.datalabeling.v1beta1.SentimentConfigOrBuilder>
sentimentConfigBuilder_;
/**
*
*
* <pre>
* Optional. Configs for sentiment selection.
* </pre>
*
* <code>
* .google.cloud.datalabeling.v1beta1.SentimentConfig sentiment_config = 3 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the sentimentConfig field is set.
*/
public boolean hasSentimentConfig() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
*
* <pre>
* Optional. Configs for sentiment selection.
* </pre>
*
* <code>
* .google.cloud.datalabeling.v1beta1.SentimentConfig sentiment_config = 3 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The sentimentConfig.
*/
public com.google.cloud.datalabeling.v1beta1.SentimentConfig getSentimentConfig() {
if (sentimentConfigBuilder_ == null) {
return sentimentConfig_ == null
? com.google.cloud.datalabeling.v1beta1.SentimentConfig.getDefaultInstance()
: sentimentConfig_;
} else {
return sentimentConfigBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Optional. Configs for sentiment selection.
* </pre>
*
* <code>
* .google.cloud.datalabeling.v1beta1.SentimentConfig sentiment_config = 3 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder setSentimentConfig(com.google.cloud.datalabeling.v1beta1.SentimentConfig value) {
if (sentimentConfigBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
sentimentConfig_ = value;
} else {
sentimentConfigBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Configs for sentiment selection.
* </pre>
*
* <code>
* .google.cloud.datalabeling.v1beta1.SentimentConfig sentiment_config = 3 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder setSentimentConfig(
com.google.cloud.datalabeling.v1beta1.SentimentConfig.Builder builderForValue) {
if (sentimentConfigBuilder_ == null) {
sentimentConfig_ = builderForValue.build();
} else {
sentimentConfigBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Configs for sentiment selection.
* </pre>
*
* <code>
* .google.cloud.datalabeling.v1beta1.SentimentConfig sentiment_config = 3 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder mergeSentimentConfig(
com.google.cloud.datalabeling.v1beta1.SentimentConfig value) {
if (sentimentConfigBuilder_ == null) {
// Merge into the existing value only when one is already present and non-default;
// otherwise just adopt the incoming value.
if (((bitField0_ & 0x00000004) != 0)
&& sentimentConfig_ != null
&& sentimentConfig_
!= com.google.cloud.datalabeling.v1beta1.SentimentConfig.getDefaultInstance()) {
getSentimentConfigBuilder().mergeFrom(value);
} else {
sentimentConfig_ = value;
}
} else {
sentimentConfigBuilder_.mergeFrom(value);
}
if (sentimentConfig_ != null) {
bitField0_ |= 0x00000004;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Optional. Configs for sentiment selection.
* </pre>
*
* <code>
* .google.cloud.datalabeling.v1beta1.SentimentConfig sentiment_config = 3 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder clearSentimentConfig() {
bitField0_ = (bitField0_ & ~0x00000004);
sentimentConfig_ = null;
if (sentimentConfigBuilder_ != null) {
sentimentConfigBuilder_.dispose();
sentimentConfigBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Configs for sentiment selection.
* </pre>
*
* <code>
* .google.cloud.datalabeling.v1beta1.SentimentConfig sentiment_config = 3 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public com.google.cloud.datalabeling.v1beta1.SentimentConfig.Builder
getSentimentConfigBuilder() {
// Marks the field present: handing out a mutable builder implies the caller will set it.
bitField0_ |= 0x00000004;
onChanged();
return getSentimentConfigFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Optional. Configs for sentiment selection.
* </pre>
*
* <code>
* .google.cloud.datalabeling.v1beta1.SentimentConfig sentiment_config = 3 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public com.google.cloud.datalabeling.v1beta1.SentimentConfigOrBuilder
getSentimentConfigOrBuilder() {
if (sentimentConfigBuilder_ != null) {
return sentimentConfigBuilder_.getMessageOrBuilder();
} else {
return sentimentConfig_ == null
? com.google.cloud.datalabeling.v1beta1.SentimentConfig.getDefaultInstance()
: sentimentConfig_;
}
}
/**
*
*
* <pre>
* Optional. Configs for sentiment selection.
* </pre>
*
* <code>
* .google.cloud.datalabeling.v1beta1.SentimentConfig sentiment_config = 3 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.datalabeling.v1beta1.SentimentConfig,
com.google.cloud.datalabeling.v1beta1.SentimentConfig.Builder,
com.google.cloud.datalabeling.v1beta1.SentimentConfigOrBuilder>
getSentimentConfigFieldBuilder() {
if (sentimentConfigBuilder_ == null) {
// Lazily create the nested builder, transferring ownership of the current value
// (sentimentConfig_ is nulled out so the builder is the single source of truth).
sentimentConfigBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.datalabeling.v1beta1.SentimentConfig,
com.google.cloud.datalabeling.v1beta1.SentimentConfig.Builder,
com.google.cloud.datalabeling.v1beta1.SentimentConfigOrBuilder>(
getSentimentConfig(), getParentForChildren(), isClean());
sentimentConfig_ = null;
}
return sentimentConfigBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.datalabeling.v1beta1.TextClassificationConfig)
}
// @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.TextClassificationConfig)
// Canonical all-defaults singleton; also serves as the factory for new builders.
private static final com.google.cloud.datalabeling.v1beta1.TextClassificationConfig
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.datalabeling.v1beta1.TextClassificationConfig();
}
public static com.google.cloud.datalabeling.v1beta1.TextClassificationConfig
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Shared parser instance: parses via a throwaway Builder and always returns
// buildPartial() so partially-read data can be attached to thrown exceptions.
private static final com.google.protobuf.Parser<TextClassificationConfig> PARSER =
new com.google.protobuf.AbstractParser<TextClassificationConfig>() {
@java.lang.Override
public TextClassificationConfig parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
// Preserve whatever was parsed before the failure for diagnostics.
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<TextClassificationConfig> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<TextClassificationConfig> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.datalabeling.v1beta1.TextClassificationConfig
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 35,134 | java-dialogflow-cx/google-cloud-dialogflow-cx/src/main/java/com/google/cloud/dialogflow/cx/v3/ChangelogsClient.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.dialogflow.cx.v3;
import com.google.api.core.ApiFuture;
import com.google.api.core.ApiFutures;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.paging.AbstractFixedSizeCollection;
import com.google.api.gax.paging.AbstractPage;
import com.google.api.gax.paging.AbstractPagedListResponse;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.dialogflow.cx.v3.stub.ChangelogsStub;
import com.google.cloud.dialogflow.cx.v3.stub.ChangelogsStubSettings;
import com.google.cloud.location.GetLocationRequest;
import com.google.cloud.location.ListLocationsRequest;
import com.google.cloud.location.ListLocationsResponse;
import com.google.cloud.location.Location;
import com.google.common.util.concurrent.MoreExecutors;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Service Description: Service for managing [Changelogs][google.cloud.dialogflow.cx.v3.Changelog].
*
* <p>This class provides the ability to make remote calls to the backing service through method
* calls that map to API methods. Sample code to get started:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (ChangelogsClient changelogsClient = ChangelogsClient.create()) {
* ChangelogName name = ChangelogName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[CHANGELOG]");
* Changelog response = changelogsClient.getChangelog(name);
* }
* }</pre>
*
* <p>Note: close() needs to be called on the ChangelogsClient object to clean up resources such as
* threads. In the example above, try-with-resources is used, which automatically calls close().
*
* <table>
* <caption>Methods</caption>
* <tr>
* <th>Method</th>
* <th>Description</th>
* <th>Method Variants</th>
* </tr>
* <tr>
* <td><p> ListChangelogs</td>
* <td><p> Returns the list of Changelogs.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> listChangelogs(ListChangelogsRequest request)
* </ul>
* <p>"Flattened" method variants have converted the fields of the request object into function parameters to enable multiple ways to call the same method.</p>
* <ul>
* <li><p> listChangelogs(AgentName parent)
* <li><p> listChangelogs(String parent)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> listChangelogsPagedCallable()
* <li><p> listChangelogsCallable()
* </ul>
* </td>
* </tr>
* <tr>
* <td><p> GetChangelog</td>
* <td><p> Retrieves the specified Changelog.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> getChangelog(GetChangelogRequest request)
* </ul>
* <p>"Flattened" method variants have converted the fields of the request object into function parameters to enable multiple ways to call the same method.</p>
* <ul>
* <li><p> getChangelog(ChangelogName name)
* <li><p> getChangelog(String name)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> getChangelogCallable()
* </ul>
* </td>
* </tr>
* <tr>
* <td><p> ListLocations</td>
* <td><p> Lists information about the supported locations for this service.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> listLocations(ListLocationsRequest request)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> listLocationsPagedCallable()
* <li><p> listLocationsCallable()
* </ul>
* </td>
* </tr>
* <tr>
* <td><p> GetLocation</td>
* <td><p> Gets information about a location.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> getLocation(GetLocationRequest request)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> getLocationCallable()
* </ul>
* </td>
* </tr>
* </table>
*
* <p>See the individual methods for example code.
*
* <p>Many parameters require resource names to be formatted in a particular way. To assist with
* these names, this class includes a format method for each type of name, and additionally a parse
* method to extract the individual identifiers contained within names that are returned.
*
* <p>This class can be customized by passing in a custom instance of ChangelogsSettings to
* create(). For example:
*
* <p>To customize credentials:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* ChangelogsSettings changelogsSettings =
* ChangelogsSettings.newBuilder()
* .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
* .build();
* ChangelogsClient changelogsClient = ChangelogsClient.create(changelogsSettings);
* }</pre>
*
* <p>To customize the endpoint:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* ChangelogsSettings changelogsSettings =
* ChangelogsSettings.newBuilder().setEndpoint(myEndpoint).build();
* ChangelogsClient changelogsClient = ChangelogsClient.create(changelogsSettings);
* }</pre>
*
* <p>To use REST (HTTP1.1/JSON) transport (instead of gRPC) for sending and receiving requests over
* the wire:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* ChangelogsSettings changelogsSettings = ChangelogsSettings.newHttpJsonBuilder().build();
* ChangelogsClient changelogsClient = ChangelogsClient.create(changelogsSettings);
* }</pre>
*
* <p>Please refer to the GitHub repository's samples for more quickstart code snippets.
*/
@Generated("by gapic-generator-java")
public class ChangelogsClient implements BackgroundResource {
private final ChangelogsSettings settings;
private final ChangelogsStub stub;
/** Constructs an instance of ChangelogsClient with default settings. */
public static final ChangelogsClient create() throws IOException {
return create(ChangelogsSettings.newBuilder().build());
}
/**
* Constructs an instance of ChangelogsClient, using the given settings. The channels are created
* based on the settings passed in, or defaults for any settings that are not set.
*/
public static final ChangelogsClient create(ChangelogsSettings settings) throws IOException {
return new ChangelogsClient(settings);
}
/**
* Constructs an instance of ChangelogsClient, using the given stub for making calls. This is for
* advanced usage - prefer using create(ChangelogsSettings).
*/
public static final ChangelogsClient create(ChangelogsStub stub) {
return new ChangelogsClient(stub);
}
/**
* Constructs an instance of ChangelogsClient, using the given settings. This is protected so that
* it is easy to make a subclass, but otherwise, the static factory methods should be preferred.
*/
protected ChangelogsClient(ChangelogsSettings settings) throws IOException {
this.settings = settings;
// The stub owns the actual transport (gRPC or HTTP/JSON) derived from the settings.
this.stub = ((ChangelogsStubSettings) settings.getStubSettings()).createStub();
}
protected ChangelogsClient(ChangelogsStub stub) {
// Stub-based construction bypasses settings entirely; getSettings() will return null.
this.settings = null;
this.stub = stub;
}
/** Returns the settings this client was created with, or null if stub-constructed. */
public final ChangelogsSettings getSettings() {
return settings;
}
/** Returns the underlying transport stub (advanced usage). */
public ChangelogsStub getStub() {
return stub;
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Returns the list of Changelogs.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (ChangelogsClient changelogsClient = ChangelogsClient.create()) {
*   AgentName parent = AgentName.of("[PROJECT]", "[LOCATION]", "[AGENT]");
*   for (Changelog element : changelogsClient.listChangelogs(parent).iterateAll()) {
*     // doThingsWith(element);
*   }
* }
* }</pre>
*
* @param parent Required. The agent containing the changelogs. Format:
*     `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;`.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final ListChangelogsPagedResponse listChangelogs(AgentName parent) {
// Typed-resource-name convenience overload: formats the name and delegates to the
// request-object variant. A null parent yields an unset parent field on the request.
ListChangelogsRequest request =
ListChangelogsRequest.newBuilder()
.setParent(parent == null ? null : parent.toString())
.build();
return listChangelogs(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
 * Returns the list of Changelogs.
 *
 * @param parent Required. The agent containing the changelogs. Format:
 *     `projects/<ProjectID>/locations/<LocationID>/agents/<AgentID>`.
 * @throws com.google.api.gax.rpc.ApiException if the remote call fails
 */
public final ListChangelogsPagedResponse listChangelogs(String parent) {
  ListChangelogsRequest.Builder requestBuilder = ListChangelogsRequest.newBuilder();
  requestBuilder.setParent(parent);
  return listChangelogs(requestBuilder.build());
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Returns the list of Changelogs.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (ChangelogsClient changelogsClient = ChangelogsClient.create()) {
* ListChangelogsRequest request =
* ListChangelogsRequest.newBuilder()
* .setParent(AgentName.of("[PROJECT]", "[LOCATION]", "[AGENT]").toString())
* .setFilter("filter-1274492040")
* .setPageSize(883849137)
* .setPageToken("pageToken873572522")
* .build();
* for (Changelog element : changelogsClient.listChangelogs(request).iterateAll()) {
* // doThingsWith(element);
* }
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final ListChangelogsPagedResponse listChangelogs(ListChangelogsRequest request) {
  // Delegates to the paged callable so callers can iterate elements across page boundaries.
  return listChangelogsPagedCallable().call(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Returns the list of Changelogs.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (ChangelogsClient changelogsClient = ChangelogsClient.create()) {
* ListChangelogsRequest request =
* ListChangelogsRequest.newBuilder()
* .setParent(AgentName.of("[PROJECT]", "[LOCATION]", "[AGENT]").toString())
* .setFilter("filter-1274492040")
* .setPageSize(883849137)
* .setPageToken("pageToken873572522")
* .build();
* ApiFuture<Changelog> future =
* changelogsClient.listChangelogsPagedCallable().futureCall(request);
* // Do something.
* for (Changelog element : future.get().iterateAll()) {
* // doThingsWith(element);
* }
* }
* }</pre>
*/
// Exposes the stub's paged callable for ListChangelogs (response wraps page iteration).
public final UnaryCallable<ListChangelogsRequest, ListChangelogsPagedResponse>
    listChangelogsPagedCallable() {
  return stub.listChangelogsPagedCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Returns the list of Changelogs.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (ChangelogsClient changelogsClient = ChangelogsClient.create()) {
* ListChangelogsRequest request =
* ListChangelogsRequest.newBuilder()
* .setParent(AgentName.of("[PROJECT]", "[LOCATION]", "[AGENT]").toString())
* .setFilter("filter-1274492040")
* .setPageSize(883849137)
* .setPageToken("pageToken873572522")
* .build();
* while (true) {
* ListChangelogsResponse response = changelogsClient.listChangelogsCallable().call(request);
* for (Changelog element : response.getChangelogsList()) {
* // doThingsWith(element);
* }
* String nextPageToken = response.getNextPageToken();
* if (!Strings.isNullOrEmpty(nextPageToken)) {
* request = request.toBuilder().setPageToken(nextPageToken).build();
* } else {
* break;
* }
* }
* }
* }</pre>
*/
// Exposes the stub's raw callable for ListChangelogs; callers handle page tokens themselves.
public final UnaryCallable<ListChangelogsRequest, ListChangelogsResponse>
    listChangelogsCallable() {
  return stub.listChangelogsCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
 * Retrieves the specified Changelog.
 *
 * @param name Required. The name of the changelog to get. Format:
 *     `projects/<ProjectID>/locations/<LocationID>/agents/<AgentID>/changelogs/<ChangelogID>`.
 * @throws com.google.api.gax.rpc.ApiException if the remote call fails
 */
public final Changelog getChangelog(ChangelogName name) {
  // A null resource name becomes a null string rather than the text "null".
  String resourceName = (name == null) ? null : name.toString();
  GetChangelogRequest request = GetChangelogRequest.newBuilder().setName(resourceName).build();
  return getChangelog(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
 * Retrieves the specified Changelog.
 *
 * @param name Required. The name of the changelog to get. Format:
 *     `projects/<ProjectID>/locations/<LocationID>/agents/<AgentID>/changelogs/<ChangelogID>`.
 * @throws com.google.api.gax.rpc.ApiException if the remote call fails
 */
public final Changelog getChangelog(String name) {
  GetChangelogRequest.Builder requestBuilder = GetChangelogRequest.newBuilder();
  requestBuilder.setName(name);
  return getChangelog(requestBuilder.build());
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Retrieves the specified Changelog.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (ChangelogsClient changelogsClient = ChangelogsClient.create()) {
* GetChangelogRequest request =
* GetChangelogRequest.newBuilder()
* .setName(
* ChangelogName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[CHANGELOG]").toString())
* .build();
* Changelog response = changelogsClient.getChangelog(request);
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final Changelog getChangelog(GetChangelogRequest request) {
  // Synchronous call; blocks until the RPC completes or throws ApiException.
  return getChangelogCallable().call(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Retrieves the specified Changelog.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (ChangelogsClient changelogsClient = ChangelogsClient.create()) {
* GetChangelogRequest request =
* GetChangelogRequest.newBuilder()
* .setName(
* ChangelogName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[CHANGELOG]").toString())
* .build();
* ApiFuture<Changelog> future = changelogsClient.getChangelogCallable().futureCall(request);
* // Do something.
* Changelog response = future.get();
* }
* }</pre>
*/
// Exposes the stub's callable for GetChangelog, usable for async futureCall invocations.
public final UnaryCallable<GetChangelogRequest, Changelog> getChangelogCallable() {
  return stub.getChangelogCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Lists information about the supported locations for this service.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (ChangelogsClient changelogsClient = ChangelogsClient.create()) {
* ListLocationsRequest request =
* ListLocationsRequest.newBuilder()
* .setName("name3373707")
* .setFilter("filter-1274492040")
* .setPageSize(883849137)
* .setPageToken("pageToken873572522")
* .build();
* for (Location element : changelogsClient.listLocations(request).iterateAll()) {
* // doThingsWith(element);
* }
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final ListLocationsPagedResponse listLocations(ListLocationsRequest request) {
  // Delegates to the paged callable so callers can iterate locations across page boundaries.
  return listLocationsPagedCallable().call(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Lists information about the supported locations for this service.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (ChangelogsClient changelogsClient = ChangelogsClient.create()) {
* ListLocationsRequest request =
* ListLocationsRequest.newBuilder()
* .setName("name3373707")
* .setFilter("filter-1274492040")
* .setPageSize(883849137)
* .setPageToken("pageToken873572522")
* .build();
* ApiFuture<Location> future =
* changelogsClient.listLocationsPagedCallable().futureCall(request);
* // Do something.
* for (Location element : future.get().iterateAll()) {
* // doThingsWith(element);
* }
* }
* }</pre>
*/
// Exposes the stub's paged callable for ListLocations (response wraps page iteration).
public final UnaryCallable<ListLocationsRequest, ListLocationsPagedResponse>
    listLocationsPagedCallable() {
  return stub.listLocationsPagedCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Lists information about the supported locations for this service.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (ChangelogsClient changelogsClient = ChangelogsClient.create()) {
* ListLocationsRequest request =
* ListLocationsRequest.newBuilder()
* .setName("name3373707")
* .setFilter("filter-1274492040")
* .setPageSize(883849137)
* .setPageToken("pageToken873572522")
* .build();
* while (true) {
* ListLocationsResponse response = changelogsClient.listLocationsCallable().call(request);
* for (Location element : response.getLocationsList()) {
* // doThingsWith(element);
* }
* String nextPageToken = response.getNextPageToken();
* if (!Strings.isNullOrEmpty(nextPageToken)) {
* request = request.toBuilder().setPageToken(nextPageToken).build();
* } else {
* break;
* }
* }
* }
* }</pre>
*/
// Exposes the stub's raw callable for ListLocations; callers handle page tokens themselves.
public final UnaryCallable<ListLocationsRequest, ListLocationsResponse> listLocationsCallable() {
  return stub.listLocationsCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Gets information about a location.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (ChangelogsClient changelogsClient = ChangelogsClient.create()) {
* GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build();
* Location response = changelogsClient.getLocation(request);
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final Location getLocation(GetLocationRequest request) {
  // Synchronous call; blocks until the RPC completes or throws ApiException.
  return getLocationCallable().call(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Gets information about a location.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (ChangelogsClient changelogsClient = ChangelogsClient.create()) {
* GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build();
* ApiFuture<Location> future = changelogsClient.getLocationCallable().futureCall(request);
* // Do something.
* Location response = future.get();
* }
* }</pre>
*/
// Exposes the stub's callable for GetLocation, usable for async futureCall invocations.
public final UnaryCallable<GetLocationRequest, Location> getLocationCallable() {
  return stub.getLocationCallable();
}
@Override
public final void close() {
  // Releases the stub's resources; the client is unusable afterwards.
  stub.close();
}
@Override
public void shutdown() {
  // Delegates lifecycle management to the underlying stub.
  stub.shutdown();
}
@Override
public boolean isShutdown() {
  // Reflects the underlying stub's shutdown state.
  return stub.isShutdown();
}
@Override
public boolean isTerminated() {
  // Reflects the underlying stub's termination state.
  return stub.isTerminated();
}
@Override
public void shutdownNow() {
  // Delegates immediate shutdown to the underlying stub.
  stub.shutdownNow();
}
@Override
public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
  // Blocks up to the given duration waiting for the underlying stub to terminate.
  return stub.awaitTermination(duration, unit);
}
/** Paged response for ListChangelogs, supporting iteration over all Changelog elements. */
public static class ListChangelogsPagedResponse
    extends AbstractPagedListResponse<
        ListChangelogsRequest,
        ListChangelogsResponse,
        Changelog,
        ListChangelogsPage,
        ListChangelogsFixedSizeCollection> {

  /** Asynchronously wraps the first-page future into a paged response. */
  public static ApiFuture<ListChangelogsPagedResponse> createAsync(
      PageContext<ListChangelogsRequest, ListChangelogsResponse, Changelog> context,
      ApiFuture<ListChangelogsResponse> futureResponse) {
    ApiFuture<ListChangelogsPage> futurePage =
        ListChangelogsPage.createEmptyPage().createPageAsync(context, futureResponse);
    // Constructor reference instead of an explicit lambda (same behavior, idiomatic form);
    // directExecutor runs the wrap on the thread that completes the page future.
    return ApiFutures.transform(
        futurePage, ListChangelogsPagedResponse::new, MoreExecutors.directExecutor());
  }

  private ListChangelogsPagedResponse(ListChangelogsPage page) {
    super(page, ListChangelogsFixedSizeCollection.createEmptyCollection());
  }
}
/** A single page of ListChangelogs results, with navigation to subsequent pages. */
public static class ListChangelogsPage
    extends AbstractPage<
        ListChangelogsRequest, ListChangelogsResponse, Changelog, ListChangelogsPage> {
  private ListChangelogsPage(
      PageContext<ListChangelogsRequest, ListChangelogsResponse, Changelog> context,
      ListChangelogsResponse response) {
    super(context, response);
  }
  // Sentinel instance used only as a factory for createPageAsync; it carries no data.
  private static ListChangelogsPage createEmptyPage() {
    return new ListChangelogsPage(null, null);
  }
  @Override
  protected ListChangelogsPage createPage(
      PageContext<ListChangelogsRequest, ListChangelogsResponse, Changelog> context,
      ListChangelogsResponse response) {
    return new ListChangelogsPage(context, response);
  }
  @Override
  public ApiFuture<ListChangelogsPage> createPageAsync(
      PageContext<ListChangelogsRequest, ListChangelogsResponse, Changelog> context,
      ApiFuture<ListChangelogsResponse> futureResponse) {
    return super.createPageAsync(context, futureResponse);
  }
}
/** Groups ListChangelogs pages into fixed-size collections of Changelog elements. */
public static class ListChangelogsFixedSizeCollection
    extends AbstractFixedSizeCollection<
        ListChangelogsRequest,
        ListChangelogsResponse,
        Changelog,
        ListChangelogsPage,
        ListChangelogsFixedSizeCollection> {
  private ListChangelogsFixedSizeCollection(List<ListChangelogsPage> pages, int collectionSize) {
    super(pages, collectionSize);
  }
  // Placeholder collection handed to the paged-response constructor (null pages, size 0).
  private static ListChangelogsFixedSizeCollection createEmptyCollection() {
    return new ListChangelogsFixedSizeCollection(null, 0);
  }
  @Override
  protected ListChangelogsFixedSizeCollection createCollection(
      List<ListChangelogsPage> pages, int collectionSize) {
    return new ListChangelogsFixedSizeCollection(pages, collectionSize);
  }
}
/** Paged response for ListLocations, supporting iteration over all Location elements. */
public static class ListLocationsPagedResponse
    extends AbstractPagedListResponse<
        ListLocationsRequest,
        ListLocationsResponse,
        Location,
        ListLocationsPage,
        ListLocationsFixedSizeCollection> {

  /** Asynchronously wraps the first-page future into a paged response. */
  public static ApiFuture<ListLocationsPagedResponse> createAsync(
      PageContext<ListLocationsRequest, ListLocationsResponse, Location> context,
      ApiFuture<ListLocationsResponse> futureResponse) {
    ApiFuture<ListLocationsPage> futurePage =
        ListLocationsPage.createEmptyPage().createPageAsync(context, futureResponse);
    // Constructor reference instead of an explicit lambda (same behavior, idiomatic form);
    // directExecutor runs the wrap on the thread that completes the page future.
    return ApiFutures.transform(
        futurePage, ListLocationsPagedResponse::new, MoreExecutors.directExecutor());
  }

  private ListLocationsPagedResponse(ListLocationsPage page) {
    super(page, ListLocationsFixedSizeCollection.createEmptyCollection());
  }
}
/** A single page of ListLocations results, with navigation to subsequent pages. */
public static class ListLocationsPage
    extends AbstractPage<
        ListLocationsRequest, ListLocationsResponse, Location, ListLocationsPage> {
  private ListLocationsPage(
      PageContext<ListLocationsRequest, ListLocationsResponse, Location> context,
      ListLocationsResponse response) {
    super(context, response);
  }
  // Sentinel instance used only as a factory for createPageAsync; it carries no data.
  private static ListLocationsPage createEmptyPage() {
    return new ListLocationsPage(null, null);
  }
  @Override
  protected ListLocationsPage createPage(
      PageContext<ListLocationsRequest, ListLocationsResponse, Location> context,
      ListLocationsResponse response) {
    return new ListLocationsPage(context, response);
  }
  @Override
  public ApiFuture<ListLocationsPage> createPageAsync(
      PageContext<ListLocationsRequest, ListLocationsResponse, Location> context,
      ApiFuture<ListLocationsResponse> futureResponse) {
    return super.createPageAsync(context, futureResponse);
  }
}
/** Groups ListLocations pages into fixed-size collections of Location elements. */
public static class ListLocationsFixedSizeCollection
    extends AbstractFixedSizeCollection<
        ListLocationsRequest,
        ListLocationsResponse,
        Location,
        ListLocationsPage,
        ListLocationsFixedSizeCollection> {
  private ListLocationsFixedSizeCollection(List<ListLocationsPage> pages, int collectionSize) {
    super(pages, collectionSize);
  }
  // Placeholder collection handed to the paged-response constructor (null pages, size 0).
  private static ListLocationsFixedSizeCollection createEmptyCollection() {
    return new ListLocationsFixedSizeCollection(null, 0);
  }
  @Override
  protected ListLocationsFixedSizeCollection createCollection(
      List<ListLocationsPage> pages, int collectionSize) {
    return new ListLocationsFixedSizeCollection(pages, collectionSize);
  }
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.portfolio.savings.data;
import static org.apache.fineract.portfolio.savings.SavingsApiConstants.SAVINGS_PRODUCT_RESOURCE_NAME;
import static org.apache.fineract.portfolio.savings.SavingsApiConstants.allowOverdraftParamName;
import static org.apache.fineract.portfolio.savings.SavingsApiConstants.currencyCodeParamName;
import static org.apache.fineract.portfolio.savings.SavingsApiConstants.daysToDormancyParamName;
import static org.apache.fineract.portfolio.savings.SavingsApiConstants.daysToEscheatParamName;
import static org.apache.fineract.portfolio.savings.SavingsApiConstants.daysToInactiveParamName;
import static org.apache.fineract.portfolio.savings.SavingsApiConstants.descriptionParamName;
import static org.apache.fineract.portfolio.savings.SavingsApiConstants.digitsAfterDecimalParamName;
import static org.apache.fineract.portfolio.savings.SavingsApiConstants.feeAmountParamName;
import static org.apache.fineract.portfolio.savings.SavingsApiConstants.feeOnMonthDayParamName;
import static org.apache.fineract.portfolio.savings.SavingsApiConstants.inMultiplesOfParamName;
import static org.apache.fineract.portfolio.savings.SavingsApiConstants.interestCalculationDaysInYearTypeParamName;
import static org.apache.fineract.portfolio.savings.SavingsApiConstants.interestCalculationTypeParamName;
import static org.apache.fineract.portfolio.savings.SavingsApiConstants.interestCompoundingPeriodTypeParamName;
import static org.apache.fineract.portfolio.savings.SavingsApiConstants.interestPostingPeriodTypeParamName;
import static org.apache.fineract.portfolio.savings.SavingsApiConstants.isDormancyTrackingActiveParamName;
import static org.apache.fineract.portfolio.savings.SavingsApiConstants.lienAllowedParamName;
import static org.apache.fineract.portfolio.savings.SavingsApiConstants.lockinPeriodFrequencyParamName;
import static org.apache.fineract.portfolio.savings.SavingsApiConstants.lockinPeriodFrequencyTypeParamName;
import static org.apache.fineract.portfolio.savings.SavingsApiConstants.maxAllowedLienLimitParamName;
import static org.apache.fineract.portfolio.savings.SavingsApiConstants.minBalanceForInterestCalculationParamName;
import static org.apache.fineract.portfolio.savings.SavingsApiConstants.minOverdraftForInterestCalculationParamName;
import static org.apache.fineract.portfolio.savings.SavingsApiConstants.minRequiredOpeningBalanceParamName;
import static org.apache.fineract.portfolio.savings.SavingsApiConstants.nameParamName;
import static org.apache.fineract.portfolio.savings.SavingsApiConstants.nominalAnnualInterestRateOverdraftParamName;
import static org.apache.fineract.portfolio.savings.SavingsApiConstants.nominalAnnualInterestRateParamName;
import static org.apache.fineract.portfolio.savings.SavingsApiConstants.overdraftLimitParamName;
import static org.apache.fineract.portfolio.savings.SavingsApiConstants.shortNameParamName;
import static org.apache.fineract.portfolio.savings.SavingsApiConstants.taxGroupIdParamName;
import static org.apache.fineract.portfolio.savings.SavingsApiConstants.withHoldTaxParamName;
import static org.apache.fineract.portfolio.savings.SavingsApiConstants.withdrawalFeeForTransfersParamName;
import com.google.gson.JsonElement;
import com.google.gson.reflect.TypeToken;
import java.lang.reflect.Type;
import java.math.BigDecimal;
import java.time.MonthDay;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import lombok.RequiredArgsConstructor;
import org.apache.commons.lang3.BooleanUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.fineract.accounting.common.AccountingConstants.SavingProductAccountingParams;
import org.apache.fineract.accounting.common.AccountingValidations;
import org.apache.fineract.infrastructure.core.data.ApiParameterError;
import org.apache.fineract.infrastructure.core.data.DataValidatorBuilder;
import org.apache.fineract.infrastructure.core.exception.InvalidJsonException;
import org.apache.fineract.infrastructure.core.exception.PlatformApiDataValidationException;
import org.apache.fineract.infrastructure.core.serialization.FromJsonHelper;
import org.apache.fineract.portfolio.savings.DepositAccountType;
import org.apache.fineract.portfolio.savings.SavingsApiConstants;
import org.apache.fineract.portfolio.savings.SavingsCompoundingInterestPeriodType;
import org.apache.fineract.portfolio.savings.SavingsInterestCalculationDaysInYearType;
import org.apache.fineract.portfolio.savings.SavingsInterestCalculationType;
import org.apache.fineract.portfolio.savings.SavingsPostingInterestPeriodType;
import org.apache.fineract.portfolio.savings.domain.SavingsProduct;
import org.springframework.stereotype.Component;
@Component
@RequiredArgsConstructor
public class SavingsProductDataValidator {
private final FromJsonHelper fromApiJsonHelper;
private final SavingsProductAccountingDataValidator savingsProductAccountingDataValidator;
// Whitelist of every JSON parameter accepted by the savings product create request;
// fromApiJsonHelper.checkForUnsupportedParameters rejects any key outside this set.
private static final Set<String> SAVINGS_PRODUCT_REQUEST_DATA_PARAMETERS = new HashSet<>(Arrays.asList(
        SavingsApiConstants.localeParamName, SavingsApiConstants.monthDayFormatParamName, nameParamName, shortNameParamName,
        descriptionParamName, currencyCodeParamName, digitsAfterDecimalParamName, inMultiplesOfParamName,
        nominalAnnualInterestRateParamName, interestCompoundingPeriodTypeParamName, interestPostingPeriodTypeParamName,
        interestCalculationTypeParamName, interestCalculationDaysInYearTypeParamName, minRequiredOpeningBalanceParamName,
        lockinPeriodFrequencyParamName, lockinPeriodFrequencyTypeParamName, SavingsApiConstants.withdrawalFeeAmountParamName,
        SavingsApiConstants.withdrawalFeeTypeParamName, withdrawalFeeForTransfersParamName, feeAmountParamName, feeOnMonthDayParamName,
        SavingsApiConstants.accountingRuleParamName, SavingsApiConstants.chargesParamName,
        SavingProductAccountingParams.INCOME_FROM_FEES.getValue(), SavingProductAccountingParams.INCOME_FROM_PENALTIES.getValue(),
        SavingProductAccountingParams.INTEREST_ON_SAVINGS.getValue(), SavingProductAccountingParams.PENALTIES_RECEIVABLE.getValue(),
        SavingProductAccountingParams.PAYMENT_CHANNEL_FUND_SOURCE_MAPPING.getValue(),
        SavingProductAccountingParams.SAVINGS_CONTROL.getValue(), SavingProductAccountingParams.TRANSFERS_SUSPENSE.getValue(),
        SavingProductAccountingParams.SAVINGS_REFERENCE.getValue(), SavingProductAccountingParams.FEE_INCOME_ACCOUNT_MAPPING.getValue(),
        SavingProductAccountingParams.PENALTY_INCOME_ACCOUNT_MAPPING.getValue(),
        SavingProductAccountingParams.FEES_RECEIVABLE.getValue(), SavingProductAccountingParams.INTEREST_PAYABLE.getValue(),
        SavingProductAccountingParams.OVERDRAFT_PORTFOLIO_CONTROL.getValue(),
        SavingProductAccountingParams.LOSSES_WRITTEN_OFF.getValue(), SavingProductAccountingParams.INCOME_FROM_INTEREST.getValue(),
        SavingProductAccountingParams.ESCHEAT_LIABILITY.getValue(), SavingProductAccountingParams.INTEREST_RECEIVABLE.getValue(),
        isDormancyTrackingActiveParamName, daysToDormancyParamName, daysToInactiveParamName, daysToEscheatParamName,
        allowOverdraftParamName, overdraftLimitParamName, nominalAnnualInterestRateOverdraftParamName,
        minOverdraftForInterestCalculationParamName, SavingsApiConstants.minRequiredBalanceParamName,
        SavingsApiConstants.enforceMinRequiredBalanceParamName, SavingsApiConstants.maxAllowedLienLimitParamName,
        SavingsApiConstants.lienAllowedParamName, minBalanceForInterestCalculationParamName, withHoldTaxParamName,
        taxGroupIdParamName));
/**
 * Validates the JSON payload submitted when creating a savings product.
 *
 * All validation errors are accumulated in a single list and thrown together as a
 * {@link PlatformApiDataValidationException} at the end, so the caller receives every
 * problem in one response rather than failing on the first.
 *
 * @param json the raw request body; must not be blank
 * @throws InvalidJsonException if {@code json} is blank
 * @throws PlatformApiDataValidationException if any parameter fails validation
 */
public void validateForCreate(final String json) {
    // Reject an empty/blank request body outright.
    if (StringUtils.isBlank(json)) {
        throw new InvalidJsonException();
    }
    // Reject any parameter name that is not in the supported whitelist.
    final Type typeOfMap = new TypeToken<Map<String, Object>>() {}.getType();
    this.fromApiJsonHelper.checkForUnsupportedParameters(typeOfMap, json, SAVINGS_PRODUCT_REQUEST_DATA_PARAMETERS);

    // Errors are collected here and thrown as one batch at the end of the method.
    final List<ApiParameterError> dataValidationErrors = new ArrayList<>();
    final DataValidatorBuilder baseDataValidator = new DataValidatorBuilder(dataValidationErrors)
            .resource(SAVINGS_PRODUCT_RESOURCE_NAME);
    final JsonElement element = this.fromApiJsonHelper.parse(json);

    // name: mandatory, at most 100 characters.
    final String name = this.fromApiJsonHelper.extractStringNamed(nameParamName, element);
    baseDataValidator.reset().parameter(nameParamName).value(name).notBlank().notExceedingLengthOf(100);

    // shortName: mandatory, at most 4 characters.
    final String shortName = this.fromApiJsonHelper.extractStringNamed(shortNameParamName, element);
    baseDataValidator.reset().parameter(shortNameParamName).value(shortName).notBlank().notExceedingLengthOf(4);

    // currency: code mandatory; decimal digits must lie in 0..6.
    final String currencyCode = this.fromApiJsonHelper.extractStringNamed(currencyCodeParamName, element);
    baseDataValidator.reset().parameter(currencyCodeParamName).value(currencyCode).notBlank();
    final Integer digitsAfterDecimal = this.fromApiJsonHelper.extractIntegerSansLocaleNamed(digitsAfterDecimalParamName, element);
    baseDataValidator.reset().parameter(digitsAfterDecimalParamName).value(digitsAfterDecimal).notNull().inMinMaxRange(0, 6);

    // inMultiplesOf: optional, zero or greater.
    if (this.fromApiJsonHelper.parameterExists(inMultiplesOfParamName, element)) {
        final Integer inMultiplesOf = this.fromApiJsonHelper.extractIntegerNamed(inMultiplesOfParamName, element, Locale.getDefault());
        baseDataValidator.reset().parameter(inMultiplesOfParamName).value(inMultiplesOf).ignoreIfNull().integerZeroOrGreater();
    }

    // nominal annual interest rate: mandatory, zero or positive.
    final BigDecimal nominalAnnualInterestRate = this.fromApiJsonHelper
            .extractBigDecimalWithLocaleNamed(nominalAnnualInterestRateParamName, element);
    baseDataValidator.reset().parameter(nominalAnnualInterestRateParamName).value(nominalAnnualInterestRate).notNull()
            .zeroOrPositiveAmount();

    // Interest compounding / posting / calculation settings: each mandatory and
    // restricted to the integer values defined by the savings domain enums.
    final Integer interestCompoundingPeriodType = this.fromApiJsonHelper
            .extractIntegerSansLocaleNamed(interestCompoundingPeriodTypeParamName, element);
    baseDataValidator.reset().parameter(interestCompoundingPeriodTypeParamName).value(interestCompoundingPeriodType).notNull()
            .isOneOfTheseValues(SavingsCompoundingInterestPeriodType.integerValues());

    final Integer interestPostingPeriodType = this.fromApiJsonHelper.extractIntegerSansLocaleNamed(interestPostingPeriodTypeParamName,
            element);
    baseDataValidator.reset().parameter(interestPostingPeriodTypeParamName).value(interestPostingPeriodType).notNull()
            .isOneOfTheseValues(SavingsPostingInterestPeriodType.integerValues());

    final Integer interestCalculationType = this.fromApiJsonHelper.extractIntegerSansLocaleNamed(interestCalculationTypeParamName,
            element);
    baseDataValidator.reset().parameter(interestCalculationTypeParamName).value(interestCalculationType).notNull()
            .isOneOfTheseValues(SavingsInterestCalculationType.integerValues());

    final Integer interestCalculationDaysInYearType = this.fromApiJsonHelper
            .extractIntegerSansLocaleNamed(interestCalculationDaysInYearTypeParamName, element);
    baseDataValidator.reset().parameter(interestCalculationDaysInYearTypeParamName).value(interestCalculationDaysInYearType).notNull()
            .isOneOfTheseValues(SavingsInterestCalculationDaysInYearType.integerValues());

    // description: optional, at most 500 characters.
    if (this.fromApiJsonHelper.parameterExists(descriptionParamName, element)) {
        final String description = this.fromApiJsonHelper.extractStringNamed(descriptionParamName, element);
        baseDataValidator.reset().parameter(descriptionParamName).value(description).ignoreIfNull().notExceedingLengthOf(500);
    }

    // minimum required opening balance: optional, zero or positive.
    if (this.fromApiJsonHelper.parameterExists(minRequiredOpeningBalanceParamName, element)) {
        final BigDecimal minOpeningBalance = this.fromApiJsonHelper.extractBigDecimalWithLocaleNamed(minRequiredOpeningBalanceParamName,
                element);
        baseDataValidator.reset().parameter(minRequiredOpeningBalanceParamName).value(minOpeningBalance).zeroOrPositiveAmount();
    }

    // Lock-in period: frequency and frequency-type must be supplied together.
    // Each branch below requires the other value when one of them is present and non-null.
    if (this.fromApiJsonHelper.parameterExists(lockinPeriodFrequencyParamName, element)) {
        final Integer lockinPeriodFrequency = this.fromApiJsonHelper.extractIntegerWithLocaleNamed(lockinPeriodFrequencyParamName,
                element);
        baseDataValidator.reset().parameter(lockinPeriodFrequencyParamName).value(lockinPeriodFrequency).integerZeroOrGreater();
        if (lockinPeriodFrequency != null) {
            // A frequency without its unit is meaningless; the type becomes mandatory.
            final Integer lockinPeriodFrequencyType = this.fromApiJsonHelper
                    .extractIntegerSansLocaleNamed(lockinPeriodFrequencyTypeParamName, element);
            baseDataValidator.reset().parameter(lockinPeriodFrequencyTypeParamName).value(lockinPeriodFrequencyType).notNull()
                    .inMinMaxRange(0, 3);
        }
    }
    if (this.fromApiJsonHelper.parameterExists(lockinPeriodFrequencyTypeParamName, element)) {
        final Integer lockinPeriodFrequencyType = this.fromApiJsonHelper
                .extractIntegerSansLocaleNamed(lockinPeriodFrequencyTypeParamName, element);
        baseDataValidator.reset().parameter(lockinPeriodFrequencyTypeParamName).value(lockinPeriodFrequencyType).inMinMaxRange(0, 3);
        if (lockinPeriodFrequencyType != null) {
            // Conversely, a unit without a frequency makes the frequency mandatory.
            final Integer lockinPeriodFrequency = this.fromApiJsonHelper.extractIntegerWithLocaleNamed(lockinPeriodFrequencyParamName,
                    element);
            baseDataValidator.reset().parameter(lockinPeriodFrequencyParamName).value(lockinPeriodFrequency).notNull()
                    .integerZeroOrGreater();
        }
    }

    // withdrawal fee for transfers: optional boolean flag.
    if (this.fromApiJsonHelper.parameterExists(withdrawalFeeForTransfersParamName, element)) {
        final Boolean isWithdrawalFeeApplicableForTransfers = this.fromApiJsonHelper
                .extractBooleanNamed(withdrawalFeeForTransfersParamName, element);
        baseDataValidator.reset().parameter(withdrawalFeeForTransfersParamName).value(isWithdrawalFeeApplicableForTransfers)
                .ignoreIfNull().validateForBooleanValue();
    }

    // Annual fee amount and its charge day are mutually dependent: supplying one
    // (non-null) makes the other mandatory.
    if (this.fromApiJsonHelper.parameterExists(feeAmountParamName, element)) {
        final BigDecimal annualFeeAmount = this.fromApiJsonHelper.extractBigDecimalWithLocaleNamed(feeAmountParamName, element);
        baseDataValidator.reset().parameter(feeAmountParamName).value(annualFeeAmount).zeroOrPositiveAmount();
        if (annualFeeAmount != null) {
            final MonthDay monthDayOfAnnualFee = this.fromApiJsonHelper.extractMonthDayNamed(feeOnMonthDayParamName, element);
            baseDataValidator.reset().parameter(feeOnMonthDayParamName).value(monthDayOfAnnualFee).notNull();
        }
    }
    if (this.fromApiJsonHelper.parameterExists(feeOnMonthDayParamName, element)) {
        final MonthDay monthDayOfAnnualFee = this.fromApiJsonHelper.extractMonthDayNamed(feeOnMonthDayParamName, element);
        if (monthDayOfAnnualFee != null) {
            final BigDecimal annualFeeAmount = this.fromApiJsonHelper.extractBigDecimalWithLocaleNamed(feeAmountParamName, element);
            baseDataValidator.reset().parameter(feeAmountParamName).value(annualFeeAmount).notNull().zeroOrPositiveAmount();
        }
    }

    // dormancy
    // An absent flag is treated as "dormancy tracking disabled".
    Boolean isDormancyActive = this.fromApiJsonHelper.extractBooleanNamed(isDormancyTrackingActiveParamName, element);
    if (isDormancyActive == null) {
        isDormancyActive = false;
    }
    if (isDormancyActive) {
        // With dormancy tracking on, all three day thresholds are mandatory and > 0.
        final Long daysToInact = this.fromApiJsonHelper.extractLongNamed(daysToInactiveParamName, element);
        baseDataValidator.reset().parameter(daysToInactiveParamName).value(daysToInact).notNull().longGreaterThanZero();
        final Long daysToDor = this.fromApiJsonHelper.extractLongNamed(daysToDormancyParamName, element);
        baseDataValidator.reset().parameter(daysToDormancyParamName).value(daysToDor).notNull().longGreaterThanZero();
        final Long daysToEsc = this.fromApiJsonHelper.extractLongNamed(daysToEscheatParamName, element);
        baseDataValidator.reset().parameter(daysToEscheatParamName).value(daysToEsc).notNull().longGreaterThanZero();
        if (null != daysToInact && null != daysToDor && null != daysToEsc) {
            // Thresholds must be strictly increasing: inactive < dormancy < escheat.
            baseDataValidator.reset().parameter(daysToDormancyParamName).value(daysToDor).longGreaterThanNumber(daysToInact);
            baseDataValidator.reset().parameter(daysToEscheatParamName).value(daysToEsc).longGreaterThanNumber(daysToDor);
        }
    }

    // accounting related data validation
    final Integer accountingRuleType = this.fromApiJsonHelper.extractIntegerNamed("accountingRule", element, Locale.getDefault());
    baseDataValidator.reset().parameter("accountingRule").value(accountingRuleType).notNull().inMinMaxRange(1, 3);
    if (AccountingValidations.isCashBasedAccounting(accountingRuleType)
            || AccountingValidations.isAccrualPeriodicBasedAccounting(accountingRuleType)) {
        // Delegate GL-account mapping checks; last arg 'true' marks this as a create request.
        savingsProductAccountingDataValidator.evaluateProductAccountingData(accountingRuleType, isDormancyActive, element,
                baseDataValidator, DepositAccountType.SAVINGS_DEPOSIT, true);
    }

    validateOverdraftParams(baseDataValidator, element);

    // minimum balance for interest calculation: optional, zero or positive.
    if (this.fromApiJsonHelper.parameterExists(minBalanceForInterestCalculationParamName, element)) {
        final BigDecimal minBalanceForInterestCalculation = this.fromApiJsonHelper
                .extractBigDecimalWithLocaleNamed(minBalanceForInterestCalculationParamName, element);
        baseDataValidator.reset().parameter(minBalanceForInterestCalculationParamName).value(minBalanceForInterestCalculation)
                .ignoreIfNull().zeroOrPositiveAmount();
    }

    validateTaxWithHoldingParams(baseDataValidator, element, true);
    validateLienParams(baseDataValidator, element);
    throwExceptionIfValidationWarningsExist(dataValidationErrors);
}
/**
 * Validates the JSON payload submitted when updating an existing savings product.
 *
 * Unlike {@link #validateForCreate(String)}, every parameter here is optional: a field
 * is validated only if it appears in the payload. For the accounting rule and dormancy
 * tracking, values absent from the payload fall back to the existing {@code product}'s
 * settings so that cross-field checks still apply. All errors are accumulated and thrown
 * in one batch at the end.
 *
 * @param json the raw request body; must not be blank
 * @param product the savings product being updated, used as the source of fallback values
 * @throws InvalidJsonException if {@code json} is blank
 * @throws PlatformApiDataValidationException if any parameter fails validation
 */
public void validateForUpdate(final String json, final SavingsProduct product) {
    // Reject an empty/blank request body outright.
    if (StringUtils.isBlank(json)) {
        throw new InvalidJsonException();
    }
    // Reject any parameter name that is not in the supported whitelist.
    final Type typeOfMap = new TypeToken<Map<String, Object>>() {}.getType();
    this.fromApiJsonHelper.checkForUnsupportedParameters(typeOfMap, json, SAVINGS_PRODUCT_REQUEST_DATA_PARAMETERS);

    final List<ApiParameterError> dataValidationErrors = new ArrayList<>();
    final DataValidatorBuilder baseDataValidator = new DataValidatorBuilder(dataValidationErrors)
            .resource(SAVINGS_PRODUCT_RESOURCE_NAME);
    final JsonElement element = this.fromApiJsonHelper.parse(json);

    // name: if supplied, must be non-blank and at most 100 characters.
    if (this.fromApiJsonHelper.parameterExists(nameParamName, element)) {
        final String name = this.fromApiJsonHelper.extractStringNamed(nameParamName, element);
        baseDataValidator.reset().parameter(nameParamName).value(name).notBlank().notExceedingLengthOf(100);
    }
    // shortName: if supplied, must be non-blank and at most 4 characters.
    if (this.fromApiJsonHelper.parameterExists(shortNameParamName, element)) {
        final String shortName = this.fromApiJsonHelper.extractStringNamed(shortNameParamName, element);
        baseDataValidator.reset().parameter(shortNameParamName).value(shortName).notBlank().notExceedingLengthOf(4);
    }
    // description: if supplied, must be non-blank (stricter than create, which allows null)
    // and at most 500 characters.
    if (this.fromApiJsonHelper.parameterExists(descriptionParamName, element)) {
        final String description = this.fromApiJsonHelper.extractStringNamed(descriptionParamName, element);
        baseDataValidator.reset().parameter(descriptionParamName).value(description).notBlank().notExceedingLengthOf(500);
    }
    if (this.fromApiJsonHelper.parameterExists(currencyCodeParamName, element)) {
        final String currencyCode = this.fromApiJsonHelper.extractStringNamed(currencyCodeParamName, element);
        baseDataValidator.reset().parameter(currencyCodeParamName).value(currencyCode).notBlank();
    }
    if (this.fromApiJsonHelper.parameterExists(digitsAfterDecimalParamName, element)) {
        final Integer digitsAfterDecimal = this.fromApiJsonHelper.extractIntegerSansLocaleNamed(digitsAfterDecimalParamName, element);
        baseDataValidator.reset().parameter(digitsAfterDecimalParamName).value(digitsAfterDecimal).notNull().inMinMaxRange(0, 6);
    }
    if (this.fromApiJsonHelper.parameterExists(inMultiplesOfParamName, element)) {
        final Integer inMultiplesOf = this.fromApiJsonHelper.extractIntegerNamed(inMultiplesOfParamName, element, Locale.getDefault());
        baseDataValidator.reset().parameter(inMultiplesOfParamName).value(inMultiplesOf).ignoreIfNull().integerZeroOrGreater();
    }
    if (this.fromApiJsonHelper.parameterExists(nominalAnnualInterestRateParamName, element)) {
        final BigDecimal interestRate = this.fromApiJsonHelper.extractBigDecimalWithLocaleNamed(nominalAnnualInterestRateParamName,
                element);
        baseDataValidator.reset().parameter(nominalAnnualInterestRateParamName).value(interestRate).notNull().zeroOrPositiveAmount();
    }
    if (this.fromApiJsonHelper.parameterExists(interestCompoundingPeriodTypeParamName, element)) {
        final Integer interestCompoundingPeriodType = this.fromApiJsonHelper
                .extractIntegerSansLocaleNamed(interestCompoundingPeriodTypeParamName, element);
        baseDataValidator.reset().parameter(interestCompoundingPeriodTypeParamName).value(interestCompoundingPeriodType).notNull()
                .isOneOfTheseValues(SavingsCompoundingInterestPeriodType.integerValues());
    }
    // interestCalculationType: here constrained to the range 1..2 rather than the enum
    // value list used on create.
    if (this.fromApiJsonHelper.parameterExists(interestCalculationTypeParamName, element)) {
        final Integer interestCalculationType = this.fromApiJsonHelper.extractIntegerSansLocaleNamed(interestCalculationTypeParamName,
                element);
        baseDataValidator.reset().parameter(interestCalculationTypeParamName).value(interestCalculationType).notNull().inMinMaxRange(1,
                2);
    }
    // interestCalculationDaysInYearType: only the literal values 360 or 365 are accepted.
    if (this.fromApiJsonHelper.parameterExists(interestCalculationDaysInYearTypeParamName, element)) {
        final Integer interestCalculationDaysInYearType = this.fromApiJsonHelper
                .extractIntegerSansLocaleNamed(interestCalculationDaysInYearTypeParamName, element);
        baseDataValidator.reset().parameter(interestCalculationDaysInYearTypeParamName).value(interestCalculationDaysInYearType)
                .notNull().isOneOfTheseValues(360, 365);
    }
    if (this.fromApiJsonHelper.parameterExists(minRequiredOpeningBalanceParamName, element)) {
        final BigDecimal minOpeningBalance = this.fromApiJsonHelper.extractBigDecimalWithLocaleNamed(minRequiredOpeningBalanceParamName,
                element);
        baseDataValidator.reset().parameter(minRequiredOpeningBalanceParamName).value(minOpeningBalance).ignoreIfNull()
                .zeroOrPositiveAmount();
    }
    // Lock-in period fields are validated independently on update; the create-time
    // mutual-presence checks are not repeated here.
    if (this.fromApiJsonHelper.parameterExists(lockinPeriodFrequencyParamName, element)) {
        final Integer lockinPeriodFrequency = this.fromApiJsonHelper.extractIntegerWithLocaleNamed(lockinPeriodFrequencyParamName,
                element);
        baseDataValidator.reset().parameter(lockinPeriodFrequencyParamName).value(lockinPeriodFrequency).ignoreIfNull()
                .integerZeroOrGreater();
    }
    if (this.fromApiJsonHelper.parameterExists(lockinPeriodFrequencyTypeParamName, element)) {
        final Integer lockinPeriodFrequencyType = this.fromApiJsonHelper
                .extractIntegerSansLocaleNamed(lockinPeriodFrequencyTypeParamName, element);
        baseDataValidator.reset().parameter(lockinPeriodFrequencyTypeParamName).value(lockinPeriodFrequencyType).inMinMaxRange(0, 3);
    }
    if (this.fromApiJsonHelper.parameterExists(withdrawalFeeForTransfersParamName, element)) {
        final Boolean isWithdrawalFeeApplicableForTransfers = this.fromApiJsonHelper
                .extractBooleanNamed(withdrawalFeeForTransfersParamName, element);
        baseDataValidator.reset().parameter(withdrawalFeeForTransfersParamName).value(isWithdrawalFeeApplicableForTransfers)
                .ignoreIfNull().validateForBooleanValue();
    }
    if (this.fromApiJsonHelper.parameterExists(feeAmountParamName, element)) {
        final BigDecimal annualFeeAmount = this.fromApiJsonHelper.extractBigDecimalWithLocaleNamed(feeAmountParamName, element);
        baseDataValidator.reset().parameter(feeAmountParamName).value(annualFeeAmount).ignoreIfNull().zeroOrPositiveAmount();
    }
    if (this.fromApiJsonHelper.parameterExists(feeOnMonthDayParamName, element)) {
        final MonthDay monthDayOfAnnualFee = this.fromApiJsonHelper.extractMonthDayNamed(feeOnMonthDayParamName, element);
        baseDataValidator.reset().parameter(feeOnMonthDayParamName).value(monthDayOfAnnualFee).ignoreIfNull();
    }

    // accounting related data validation
    // Default to the product's current accounting type; only validate if the payload
    // explicitly changes it.
    Integer accountingRuleType = product.getAccountingType();
    if (this.fromApiJsonHelper.parameterExists("accountingRule", element)) {
        accountingRuleType = this.fromApiJsonHelper.extractIntegerNamed("accountingRule", element, Locale.getDefault());
        baseDataValidator.reset().parameter("accountingRule").value(accountingRuleType).notNull().inMinMaxRange(1, 3);
    }

    // dormancy
    // Fall back to the product's current dormancy-tracking flag when not supplied.
    final Boolean isDormancyActive = this.fromApiJsonHelper.parameterExists(isDormancyTrackingActiveParamName, element)
            ? this.fromApiJsonHelper.extractBooleanNamed(isDormancyTrackingActiveParamName, element)
            : product.isDormancyTrackingActive();

    if (AccountingValidations.isCashBasedAccounting(accountingRuleType)
            || AccountingValidations.isAccrualPeriodicBasedAccounting(accountingRuleType)) {
        // Delegate GL-account mapping checks; last arg 'false' marks this as an update request.
        savingsProductAccountingDataValidator.evaluateProductAccountingData(accountingRuleType, isDormancyActive, element,
                baseDataValidator, DepositAccountType.SAVINGS_DEPOSIT, false);
    }

    if (null != isDormancyActive && isDormancyActive) {
        // Each day threshold falls back to the product's stored value when not supplied,
        // so the cross-field ordering check below still covers partial updates.
        final Long daysToInact = this.fromApiJsonHelper.parameterExists(daysToInactiveParamName, element)
                ? this.fromApiJsonHelper.extractLongNamed(daysToInactiveParamName, element)
                : product.getDaysToInactive();
        baseDataValidator.reset().parameter(daysToInactiveParamName).value(daysToInact).notNull().longGreaterThanZero();
        final Long daysToDor = this.fromApiJsonHelper.parameterExists(daysToDormancyParamName, element)
                ? this.fromApiJsonHelper.extractLongNamed(daysToDormancyParamName, element)
                : product.getDaysToDormancy();
        baseDataValidator.reset().parameter(daysToDormancyParamName).value(daysToDor).notNull().longGreaterThanZero();
        final Long daysToEsc = this.fromApiJsonHelper.parameterExists(daysToEscheatParamName, element)
                ? this.fromApiJsonHelper.extractLongNamed(daysToEscheatParamName, element)
                : product.getDaysToEscheat();
        baseDataValidator.reset().parameter(daysToEscheatParamName).value(daysToEsc).notNull().longGreaterThanZero();
        if (null != daysToInact && null != daysToDor && null != daysToEsc) {
            // Thresholds must be strictly increasing: inactive < dormancy < escheat.
            baseDataValidator.reset().parameter(daysToDormancyParamName).value(daysToDor).longGreaterThanNumber(daysToInact);
            baseDataValidator.reset().parameter(daysToEscheatParamName).value(daysToEsc).longGreaterThanNumber(daysToDor);
        }
        // If the escheat-liability GL account is supplied, it must be a positive id.
        if (this.fromApiJsonHelper.parameterExists(SavingProductAccountingParams.ESCHEAT_LIABILITY.getValue(), element)) {
            final Long escheatLiabilityAccountId = this.fromApiJsonHelper
                    .extractLongNamed(SavingProductAccountingParams.ESCHEAT_LIABILITY.getValue(), element);
            baseDataValidator.reset().parameter(SavingProductAccountingParams.ESCHEAT_LIABILITY.getValue())
                    .value(escheatLiabilityAccountId).notNull().integerGreaterThanZero();
        }
    }

    validateOverdraftParams(baseDataValidator, element);

    if (this.fromApiJsonHelper.parameterExists(minBalanceForInterestCalculationParamName, element)) {
        final BigDecimal minBalanceForInterestCalculation = this.fromApiJsonHelper
                .extractBigDecimalWithLocaleNamed(minBalanceForInterestCalculationParamName, element);
        baseDataValidator.reset().parameter(minBalanceForInterestCalculationParamName).value(minBalanceForInterestCalculation)
                .ignoreIfNull().zeroOrPositiveAmount();
    }

    validateTaxWithHoldingParams(baseDataValidator, element, false);
    validateLienParams(baseDataValidator, element);
    throwExceptionIfValidationWarningsExist(dataValidationErrors);
}
/**
 * Raises a {@link PlatformApiDataValidationException} carrying every accumulated
 * validation error; a no-op when the list is empty.
 *
 * @param dataValidationErrors the errors collected by the validator chain
 */
private void throwExceptionIfValidationWarningsExist(final List<ApiParameterError> dataValidationErrors) {
    if (dataValidationErrors.isEmpty()) {
        return;
    }
    throw new PlatformApiDataValidationException(dataValidationErrors);
}
/**
 * Validates the overdraft-related parameters when they are present in the payload.
 * Every field is optional; supplied booleans must parse as booleans and supplied
 * amounts must be zero or positive.
 *
 * @param baseDataValidator collector for validation errors
 * @param element parsed JSON request body
 */
private void validateOverdraftParams(final DataValidatorBuilder baseDataValidator, final JsonElement element) {
    // Flag enabling overdraft on accounts of this product.
    if (this.fromApiJsonHelper.parameterExists(allowOverdraftParamName, element)) {
        final Boolean overdraftAllowed = this.fromApiJsonHelper.extractBooleanNamed(allowOverdraftParamName, element);
        baseDataValidator.reset().parameter(allowOverdraftParamName).value(overdraftAllowed).ignoreIfNull().validateForBooleanValue();
    }
    // Maximum amount an account may be overdrawn.
    if (this.fromApiJsonHelper.parameterExists(overdraftLimitParamName, element)) {
        final BigDecimal overdraftCeiling = this.fromApiJsonHelper.extractBigDecimalWithLocaleNamed(overdraftLimitParamName, element);
        baseDataValidator.reset().parameter(overdraftLimitParamName).value(overdraftCeiling).ignoreIfNull().zeroOrPositiveAmount();
    }
    // Annual interest rate charged on the overdrawn portion.
    if (this.fromApiJsonHelper.parameterExists(nominalAnnualInterestRateOverdraftParamName, element)) {
        final BigDecimal overdraftNominalRate = this.fromApiJsonHelper
                .extractBigDecimalWithLocaleNamed(nominalAnnualInterestRateOverdraftParamName, element);
        baseDataValidator.reset().parameter(nominalAnnualInterestRateOverdraftParamName).value(overdraftNominalRate)
                .ignoreIfNull().zeroOrPositiveAmount();
    }
    // Minimum overdrawn balance before overdraft interest applies.
    if (this.fromApiJsonHelper.parameterExists(minOverdraftForInterestCalculationParamName, element)) {
        final BigDecimal overdraftInterestFloor = this.fromApiJsonHelper
                .extractBigDecimalWithLocaleNamed(minOverdraftForInterestCalculationParamName, element);
        baseDataValidator.reset().parameter(minOverdraftForInterestCalculationParamName).value(overdraftInterestFloor)
                .ignoreIfNull().zeroOrPositiveAmount();
    }
}
/**
 * Validates the lien-related parameters when present, and — when both lien and
 * overdraft are enabled — checks that the overdraft limit does not exceed the
 * maximum allowed lien limit.
 *
 * Fix: the cross-field comparison previously dereferenced the extracted limits
 * unconditionally, throwing a {@code NullPointerException} (HTTP 500) instead of a
 * validation error whenever either limit was absent or null in the payload. The
 * comparison is now guarded so it only runs when both values are supplied.
 *
 * @param baseDataValidator collector for validation errors
 * @param element parsed JSON request body
 */
private void validateLienParams(final DataValidatorBuilder baseDataValidator, final JsonElement element) {
    if (this.fromApiJsonHelper.parameterExists(lienAllowedParamName, element)) {
        final Boolean lienAllowed = this.fromApiJsonHelper.extractBooleanNamed(lienAllowedParamName, element);
        baseDataValidator.reset().parameter(lienAllowedParamName).value(lienAllowed).ignoreIfNull().validateForBooleanValue();
    }
    if (this.fromApiJsonHelper.parameterExists(maxAllowedLienLimitParamName, element)) {
        final BigDecimal maxAllowedLienLimit = this.fromApiJsonHelper.extractBigDecimalWithLocaleNamed(maxAllowedLienLimitParamName,
                element);
        baseDataValidator.reset().parameter(maxAllowedLienLimitParamName).value(maxAllowedLienLimit).ignoreIfNull()
                .zeroOrPositiveAmount();
    }
    // Cross-field rule: when lien and overdraft are both enabled, the overdraft limit
    // may not exceed the lien limit.
    if (BooleanUtils.isTrue(this.fromApiJsonHelper.extractBooleanNamed(lienAllowedParamName, element))
            && BooleanUtils.isTrue(this.fromApiJsonHelper.extractBooleanNamed(allowOverdraftParamName, element))) {
        final BigDecimal maxAllowedLienLimit = this.fromApiJsonHelper.extractBigDecimalWithLocaleNamed(maxAllowedLienLimitParamName,
                element);
        final BigDecimal overdraftLimit = this.fromApiJsonHelper.extractBigDecimalWithLocaleNamed(overdraftLimitParamName, element);
        // Guard against NPE: either limit may be absent (null); the comparison is only
        // meaningful when both values are present.
        if (overdraftLimit != null && maxAllowedLienLimit != null && overdraftLimit.compareTo(maxAllowedLienLimit) > 0) {
            baseDataValidator.reset().failWithCodeNoParameterAddedToErrorCode("Overdraft.limit.can.not.be.greater.than.lien.limit");
        }
    }
}
/**
 * Validates the withholding-tax parameters. When tax withholding is enabled, a tax
 * group id is mandatory on create; on update it is only required if the parameter
 * is present in the payload.
 *
 * @param baseDataValidator collector for validation errors
 * @param element parsed JSON request body
 * @param isCreate true for create requests, false for updates
 */
private void validateTaxWithHoldingParams(final DataValidatorBuilder baseDataValidator, final JsonElement element,
        final boolean isCreate) {
    // When the flag is present, its string form must parse as a boolean.
    if (this.fromApiJsonHelper.parameterExists(withHoldTaxParamName, element)) {
        final String withHoldTaxRaw = this.fromApiJsonHelper.extractStringNamed(withHoldTaxParamName, element);
        baseDataValidator.reset().parameter(withHoldTaxParamName).value(withHoldTaxRaw).ignoreIfNull().validateForBooleanValue();
    }
    // An absent or null flag means "tax withholding disabled".
    final boolean withHoldTaxEnabled = Boolean.TRUE
            .equals(this.fromApiJsonHelper.extractBooleanNamed(withHoldTaxParamName, element));

    if (this.fromApiJsonHelper.parameterExists(taxGroupIdParamName, element)) {
        final Long taxGroupId = this.fromApiJsonHelper.extractLongNamed(taxGroupIdParamName, element);
        baseDataValidator.reset().parameter(taxGroupIdParamName).value(taxGroupId).ignoreIfNull().longGreaterThanZero();
        if (withHoldTaxEnabled) {
            // A product that withholds tax must name the tax group to apply.
            baseDataValidator.reset().parameter(taxGroupIdParamName).value(taxGroupId).notBlank();
        }
    } else if (withHoldTaxEnabled && isCreate) {
        // On create, enabling withholding without a tax group is an error; report the
        // missing value through the validator.
        final Long missingTaxGroupId = null;
        baseDataValidator.reset().parameter(taxGroupIdParamName).value(missingTaxGroupId).notBlank();
    }
}
}
|
googleapis/google-api-java-client-services | 35,069 | clients/google-api-services-discoveryengine/v1alpha/2.0.0/com/google/api/services/discoveryengine/v1alpha/model/GoogleCloudDiscoveryengineV1alphaWidgetConfig.java | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.discoveryengine.v1alpha.model;
/**
* WidgetConfig captures configs at the Widget level.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Discovery Engine API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class GoogleCloudDiscoveryengineV1alphaWidgetConfig extends com.google.api.client.json.GenericJson {
/**
* Will be used for all widget access settings seen in cloud console integration page. Replaces
* top deprecated top level properties.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private GoogleCloudDiscoveryengineV1alphaWidgetConfigAccessSettings accessSettings;
/**
* Whether allow no-auth integration with widget. If set true, public access to search or other
* solutions from widget is allowed without authenication token provided by customer hosted
* backend server.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean allowPublicAccess;
/**
* Allowlisted domains that can load this widget.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> allowlistedDomains;
/**
* Optional. Output only. Describes the assistant settings of the widget.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private GoogleCloudDiscoveryengineV1alphaWidgetConfigAssistantSettings assistantSettings;
/**
* Output only. Collection components that lists all collections and child data stores associated
* with the widget config, those data sources can be used for filtering in widget service APIs,
* users can return results that from selected data sources.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<GoogleCloudDiscoveryengineV1alphaWidgetConfigCollectionComponent> collectionComponents;
/**
* Output only. Unique obfuscated identifier of a WidgetConfig.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String configId;
/**
* The content search spec that configs the desired behavior of content search.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private GoogleCloudDiscoveryengineV1alphaSearchRequestContentSearchSpec contentSearchSpec;
/**
* Output only. Timestamp the WidgetConfig was created.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private String createTime;
/**
* Optional. Output only. Describes the customer related configurations, currently only used for
* government customers. This field cannot be modified after project onboarding.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private GoogleCloudDiscoveryengineV1alphaWidgetConfigCustomerProvidedConfig customerProvidedConfig;
/**
* Output only. The type of the parent data store.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String dataStoreType;
/**
* Configurable UI configurations per data store.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<GoogleCloudDiscoveryengineV1alphaWidgetConfigDataStoreUiConfig> dataStoreUiConfigs;
/**
* The default ordering for search results if specified. Used to set SearchRequest#order_by on
* applicable requests. https://cloud.google.com/generative-ai-app-builder/docs/reference/rest/v1a
* lpha/projects.locations.dataStores.servingConfigs/search#request-body
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String defaultSearchRequestOrderBy;
/**
* Required. The human readable widget config display name. Used in Discovery UI. This field must
* be a UTF-8 encoded string with a length limit of 128 characters. Otherwise, an INVALID_ARGUMENT
* error is returned.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String displayName;
/**
* Whether or not to enable autocomplete.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean enableAutocomplete;
/**
* Whether to allow conversational search (LLM, multi-turn) or not (non-LLM, single-turn).
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean enableConversationalSearch;
/**
* Optional. Output only. Whether to enable private knowledge graph.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean enablePrivateKnowledgeGraph;
/**
* Turn on or off collecting the search result quality feedback from end users.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean enableQualityFeedback;
/**
* Whether to show the result score.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean enableResultScore;
/**
* Whether to enable safe search.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean enableSafeSearch;
/**
* Whether to enable search-as-you-type behavior for the search widget
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean enableSearchAsYouType;
/**
* Turn on or off summary for each snippets result.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean enableSnippetResultSummary;
/**
* Turn on or off summarization for the search response.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean enableSummarization;
/**
* Whether to enable standalone web app.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean enableWebApp;
/**
* Allows to toggle unstable/experimental features in the widget (or web app)
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.Map<String, java.lang.String> experimentalFeatures;
  /**
   * The configuration and appearance of facets in the end user view.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<GoogleCloudDiscoveryengineV1alphaWidgetConfigFacetField> facetField;
  /**
   * The key is the UI component. Mock. Currently supported `title`, `thumbnail`, `url`, `custom1`,
   * `custom2`, `custom3`. The value is the name of the field along with its device visibility. The
   * 3 custom fields are optional and can be added or removed. `title`, `thumbnail`, `url` are
   * required UI components that cannot be removed.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.Map<String, GoogleCloudDiscoveryengineV1alphaWidgetConfigUIComponentField> fieldsUiComponentsMap;
  /**
   * Optional. Describes the homepage settings of the widget.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private GoogleCloudDiscoveryengineV1alphaWidgetConfigHomepageSetting homepageSetting;
  /**
   * Output only. The industry vertical that the WidgetConfig registers. The WidgetConfig industry
   * vertical is based on the associated Engine.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String industryVertical;
  /**
   * Output only. Whether LLM is enabled in the corresponding data store.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean llmEnabled;
  /**
   * Output only. Whether the customer accepted data use terms.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean minimumDataTermAccepted;
  /**
   * Immutable. The full resource name of the widget config. Format: `projects/{project}/locations/{
   * location}/collections/{collection_id}/dataStores/{data_store_id}/widgetConfigs/{widget_config_i
   * d}`. This field must be a UTF-8 encoded string with a length limit of 1024 characters.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String name;
  /**
   * The type of snippet to display in UCS widget. - RESULT_DISPLAY_TYPE_UNSPECIFIED for existing
   * users. - SNIPPET for new non-enterprise search users. - EXTRACTIVE_ANSWER for new enterprise
   * search users.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String resultDisplayType;
  /**
   * Required. Immutable. Specifies the solution type that this WidgetConfig can be used for.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String solutionType;
  /**
   * Describes search widget UI branding settings, such as the widget title, logo, favicons, and
   * colors.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private GoogleCloudDiscoveryengineV1alphaWidgetConfigUiBrandingSettings uiBranding;
  /**
   * Describes general widget search settings as seen in cloud console widget configuration page.
   * Replaces deprecated top-level properties.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private GoogleCloudDiscoveryengineV1alphaWidgetConfigUiSettings uiSettings;
  /**
   * Output only. Timestamp the WidgetConfig was updated.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private String updateTime;
  /**
   * Will be used for all widget access settings seen in cloud console integration page. Replaces
   * deprecated top-level properties.
   * @return value or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfigAccessSettings getAccessSettings() {
    return accessSettings;
  }
  /**
   * Will be used for all widget access settings seen in cloud console integration page. Replaces
   * deprecated top-level properties.
   * @param accessSettings accessSettings or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfig setAccessSettings(GoogleCloudDiscoveryengineV1alphaWidgetConfigAccessSettings accessSettings) {
    this.accessSettings = accessSettings;
    return this;
  }
  /**
   * Whether to allow no-auth integration with widget. If set true, public access to search or
   * other solutions from widget is allowed without an authentication token provided by the
   * customer hosted backend server.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getAllowPublicAccess() {
    return allowPublicAccess;
  }
  /**
   * Whether to allow no-auth integration with widget. If set true, public access to search or
   * other solutions from widget is allowed without an authentication token provided by the
   * customer hosted backend server.
   * @param allowPublicAccess allowPublicAccess or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfig setAllowPublicAccess(java.lang.Boolean allowPublicAccess) {
    this.allowPublicAccess = allowPublicAccess;
    return this;
  }
  /**
   * Allowlisted domains that can load this widget.
   * @return value or {@code null} for none
   */
  public java.util.List<java.lang.String> getAllowlistedDomains() {
    return allowlistedDomains;
  }
  /**
   * Allowlisted domains that can load this widget.
   * @param allowlistedDomains allowlistedDomains or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfig setAllowlistedDomains(java.util.List<java.lang.String> allowlistedDomains) {
    this.allowlistedDomains = allowlistedDomains;
    return this;
  }
  /**
   * Optional. Output only. Describes the assistant settings of the widget.
   * @return value or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfigAssistantSettings getAssistantSettings() {
    return assistantSettings;
  }
  /**
   * Optional. Output only. Describes the assistant settings of the widget.
   * @param assistantSettings assistantSettings or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfig setAssistantSettings(GoogleCloudDiscoveryengineV1alphaWidgetConfigAssistantSettings assistantSettings) {
    this.assistantSettings = assistantSettings;
    return this;
  }
  /**
   * Output only. Collection components that list all collections and child data stores associated
   * with the widget config; those data sources can be used for filtering in widget service APIs,
   * so users can return results from selected data sources.
   * @return value or {@code null} for none
   */
  public java.util.List<GoogleCloudDiscoveryengineV1alphaWidgetConfigCollectionComponent> getCollectionComponents() {
    return collectionComponents;
  }
  /**
   * Output only. Collection components that list all collections and child data stores associated
   * with the widget config; those data sources can be used for filtering in widget service APIs,
   * so users can return results from selected data sources.
   * @param collectionComponents collectionComponents or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfig setCollectionComponents(java.util.List<GoogleCloudDiscoveryengineV1alphaWidgetConfigCollectionComponent> collectionComponents) {
    this.collectionComponents = collectionComponents;
    return this;
  }
  /**
   * Output only. Unique obfuscated identifier of a WidgetConfig.
   * @return value or {@code null} for none
   */
  public java.lang.String getConfigId() {
    return configId;
  }
  /**
   * Output only. Unique obfuscated identifier of a WidgetConfig.
   * @param configId configId or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfig setConfigId(java.lang.String configId) {
    this.configId = configId;
    return this;
  }
  /**
   * The content search spec that configures the desired behavior of content search.
   * @return value or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaSearchRequestContentSearchSpec getContentSearchSpec() {
    return contentSearchSpec;
  }
  /**
   * The content search spec that configures the desired behavior of content search.
   * @param contentSearchSpec contentSearchSpec or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfig setContentSearchSpec(GoogleCloudDiscoveryengineV1alphaSearchRequestContentSearchSpec contentSearchSpec) {
    this.contentSearchSpec = contentSearchSpec;
    return this;
  }
  /**
   * Output only. Timestamp the WidgetConfig was created.
   * @return value or {@code null} for none
   */
  public String getCreateTime() {
    return createTime;
  }
  /**
   * Output only. Timestamp the WidgetConfig was created.
   * @param createTime createTime or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfig setCreateTime(String createTime) {
    this.createTime = createTime;
    return this;
  }
  /**
   * Optional. Output only. Describes the customer related configurations, currently only used for
   * government customers. This field cannot be modified after project onboarding.
   * @return value or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfigCustomerProvidedConfig getCustomerProvidedConfig() {
    return customerProvidedConfig;
  }
  /**
   * Optional. Output only. Describes the customer related configurations, currently only used for
   * government customers. This field cannot be modified after project onboarding.
   * @param customerProvidedConfig customerProvidedConfig or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfig setCustomerProvidedConfig(GoogleCloudDiscoveryengineV1alphaWidgetConfigCustomerProvidedConfig customerProvidedConfig) {
    this.customerProvidedConfig = customerProvidedConfig;
    return this;
  }
  /**
   * Output only. The type of the parent data store.
   * @return value or {@code null} for none
   */
  public java.lang.String getDataStoreType() {
    return dataStoreType;
  }
  /**
   * Output only. The type of the parent data store.
   * @param dataStoreType dataStoreType or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfig setDataStoreType(java.lang.String dataStoreType) {
    this.dataStoreType = dataStoreType;
    return this;
  }
  /**
   * Configurable UI configurations per data store.
   * @return value or {@code null} for none
   */
  public java.util.List<GoogleCloudDiscoveryengineV1alphaWidgetConfigDataStoreUiConfig> getDataStoreUiConfigs() {
    return dataStoreUiConfigs;
  }
  /**
   * Configurable UI configurations per data store.
   * @param dataStoreUiConfigs dataStoreUiConfigs or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfig setDataStoreUiConfigs(java.util.List<GoogleCloudDiscoveryengineV1alphaWidgetConfigDataStoreUiConfig> dataStoreUiConfigs) {
    this.dataStoreUiConfigs = dataStoreUiConfigs;
    return this;
  }
  /**
   * The default ordering for search results if specified. Used to set SearchRequest#order_by on
   * applicable requests. https://cloud.google.com/generative-ai-app-builder/docs/reference/rest/v1a
   * lpha/projects.locations.dataStores.servingConfigs/search#request-body
   * @return value or {@code null} for none
   */
  public java.lang.String getDefaultSearchRequestOrderBy() {
    return defaultSearchRequestOrderBy;
  }
  /**
   * The default ordering for search results if specified. Used to set SearchRequest#order_by on
   * applicable requests. https://cloud.google.com/generative-ai-app-builder/docs/reference/rest/v1a
   * lpha/projects.locations.dataStores.servingConfigs/search#request-body
   * @param defaultSearchRequestOrderBy defaultSearchRequestOrderBy or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfig setDefaultSearchRequestOrderBy(java.lang.String defaultSearchRequestOrderBy) {
    this.defaultSearchRequestOrderBy = defaultSearchRequestOrderBy;
    return this;
  }
  /**
   * Required. The human readable widget config display name. Used in Discovery UI. This field must
   * be a UTF-8 encoded string with a length limit of 128 characters. Otherwise, an INVALID_ARGUMENT
   * error is returned.
   * @return value or {@code null} for none
   */
  public java.lang.String getDisplayName() {
    return displayName;
  }
  /**
   * Required. The human readable widget config display name. Used in Discovery UI. This field must
   * be a UTF-8 encoded string with a length limit of 128 characters. Otherwise, an INVALID_ARGUMENT
   * error is returned.
   * @param displayName displayName or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfig setDisplayName(java.lang.String displayName) {
    this.displayName = displayName;
    return this;
  }
  /**
   * Whether or not to enable autocomplete.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getEnableAutocomplete() {
    return enableAutocomplete;
  }
  /**
   * Whether or not to enable autocomplete.
   * @param enableAutocomplete enableAutocomplete or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfig setEnableAutocomplete(java.lang.Boolean enableAutocomplete) {
    this.enableAutocomplete = enableAutocomplete;
    return this;
  }
  /**
   * Whether to allow conversational search (LLM, multi-turn) or not (non-LLM, single-turn).
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getEnableConversationalSearch() {
    return enableConversationalSearch;
  }
  /**
   * Whether to allow conversational search (LLM, multi-turn) or not (non-LLM, single-turn).
   * @param enableConversationalSearch enableConversationalSearch or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfig setEnableConversationalSearch(java.lang.Boolean enableConversationalSearch) {
    this.enableConversationalSearch = enableConversationalSearch;
    return this;
  }
  /**
   * Optional. Output only. Whether to enable private knowledge graph.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getEnablePrivateKnowledgeGraph() {
    return enablePrivateKnowledgeGraph;
  }
  /**
   * Optional. Output only. Whether to enable private knowledge graph.
   * @param enablePrivateKnowledgeGraph enablePrivateKnowledgeGraph or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfig setEnablePrivateKnowledgeGraph(java.lang.Boolean enablePrivateKnowledgeGraph) {
    this.enablePrivateKnowledgeGraph = enablePrivateKnowledgeGraph;
    return this;
  }
  /**
   * Turn on or off collecting the search result quality feedback from end users.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getEnableQualityFeedback() {
    return enableQualityFeedback;
  }
  /**
   * Turn on or off collecting the search result quality feedback from end users.
   * @param enableQualityFeedback enableQualityFeedback or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfig setEnableQualityFeedback(java.lang.Boolean enableQualityFeedback) {
    this.enableQualityFeedback = enableQualityFeedback;
    return this;
  }
  /**
   * Whether to show the result score.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getEnableResultScore() {
    return enableResultScore;
  }
  /**
   * Whether to show the result score.
   * @param enableResultScore enableResultScore or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfig setEnableResultScore(java.lang.Boolean enableResultScore) {
    this.enableResultScore = enableResultScore;
    return this;
  }
  /**
   * Whether to enable safe search.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getEnableSafeSearch() {
    return enableSafeSearch;
  }
  /**
   * Whether to enable safe search.
   * @param enableSafeSearch enableSafeSearch or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfig setEnableSafeSearch(java.lang.Boolean enableSafeSearch) {
    this.enableSafeSearch = enableSafeSearch;
    return this;
  }
  /**
   * Whether to enable search-as-you-type behavior for the search widget.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getEnableSearchAsYouType() {
    return enableSearchAsYouType;
  }
  /**
   * Whether to enable search-as-you-type behavior for the search widget.
   * @param enableSearchAsYouType enableSearchAsYouType or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfig setEnableSearchAsYouType(java.lang.Boolean enableSearchAsYouType) {
    this.enableSearchAsYouType = enableSearchAsYouType;
    return this;
  }
  /**
   * Turn on or off the summary for each snippet result.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getEnableSnippetResultSummary() {
    return enableSnippetResultSummary;
  }
  /**
   * Turn on or off the summary for each snippet result.
   * @param enableSnippetResultSummary enableSnippetResultSummary or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfig setEnableSnippetResultSummary(java.lang.Boolean enableSnippetResultSummary) {
    this.enableSnippetResultSummary = enableSnippetResultSummary;
    return this;
  }
  /**
   * Turn on or off summarization for the search response.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getEnableSummarization() {
    return enableSummarization;
  }
  /**
   * Turn on or off summarization for the search response.
   * @param enableSummarization enableSummarization or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfig setEnableSummarization(java.lang.Boolean enableSummarization) {
    this.enableSummarization = enableSummarization;
    return this;
  }
  /**
   * Whether to enable standalone web app.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getEnableWebApp() {
    return enableWebApp;
  }
  /**
   * Whether to enable standalone web app.
   * @param enableWebApp enableWebApp or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfig setEnableWebApp(java.lang.Boolean enableWebApp) {
    this.enableWebApp = enableWebApp;
    return this;
  }
  /**
   * Allows toggling of unstable/experimental features in the widget (or web app).
   * @return value or {@code null} for none
   */
  public java.util.Map<String, java.lang.String> getExperimentalFeatures() {
    return experimentalFeatures;
  }
  /**
   * Allows toggling of unstable/experimental features in the widget (or web app).
   * @param experimentalFeatures experimentalFeatures or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfig setExperimentalFeatures(java.util.Map<String, java.lang.String> experimentalFeatures) {
    this.experimentalFeatures = experimentalFeatures;
    return this;
  }
  /**
   * The configuration and appearance of facets in the end user view.
   * @return value or {@code null} for none
   */
  public java.util.List<GoogleCloudDiscoveryengineV1alphaWidgetConfigFacetField> getFacetField() {
    return facetField;
  }
  /**
   * The configuration and appearance of facets in the end user view.
   * @param facetField facetField or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfig setFacetField(java.util.List<GoogleCloudDiscoveryengineV1alphaWidgetConfigFacetField> facetField) {
    this.facetField = facetField;
    return this;
  }
  /**
   * The key is the UI component. Mock. Currently supported `title`, `thumbnail`, `url`, `custom1`,
   * `custom2`, `custom3`. The value is the name of the field along with its device visibility. The
   * 3 custom fields are optional and can be added or removed. `title`, `thumbnail`, `url` are
   * required UI components that cannot be removed.
   * @return value or {@code null} for none
   */
  public java.util.Map<String, GoogleCloudDiscoveryengineV1alphaWidgetConfigUIComponentField> getFieldsUiComponentsMap() {
    return fieldsUiComponentsMap;
  }
  /**
   * The key is the UI component. Mock. Currently supported `title`, `thumbnail`, `url`, `custom1`,
   * `custom2`, `custom3`. The value is the name of the field along with its device visibility. The
   * 3 custom fields are optional and can be added or removed. `title`, `thumbnail`, `url` are
   * required UI components that cannot be removed.
   * @param fieldsUiComponentsMap fieldsUiComponentsMap or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfig setFieldsUiComponentsMap(java.util.Map<String, GoogleCloudDiscoveryengineV1alphaWidgetConfigUIComponentField> fieldsUiComponentsMap) {
    this.fieldsUiComponentsMap = fieldsUiComponentsMap;
    return this;
  }
  /**
   * Optional. Describes the homepage settings of the widget.
   * @return value or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfigHomepageSetting getHomepageSetting() {
    return homepageSetting;
  }
  /**
   * Optional. Describes the homepage settings of the widget.
   * @param homepageSetting homepageSetting or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfig setHomepageSetting(GoogleCloudDiscoveryengineV1alphaWidgetConfigHomepageSetting homepageSetting) {
    this.homepageSetting = homepageSetting;
    return this;
  }
  /**
   * Output only. The industry vertical that the WidgetConfig registers. The WidgetConfig industry
   * vertical is based on the associated Engine.
   * @return value or {@code null} for none
   */
  public java.lang.String getIndustryVertical() {
    return industryVertical;
  }
  /**
   * Output only. The industry vertical that the WidgetConfig registers. The WidgetConfig industry
   * vertical is based on the associated Engine.
   * @param industryVertical industryVertical or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfig setIndustryVertical(java.lang.String industryVertical) {
    this.industryVertical = industryVertical;
    return this;
  }
  /**
   * Output only. Whether LLM is enabled in the corresponding data store.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getLlmEnabled() {
    return llmEnabled;
  }
  /**
   * Output only. Whether LLM is enabled in the corresponding data store.
   * @param llmEnabled llmEnabled or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfig setLlmEnabled(java.lang.Boolean llmEnabled) {
    this.llmEnabled = llmEnabled;
    return this;
  }
  /**
   * Output only. Whether the customer accepted data use terms.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getMinimumDataTermAccepted() {
    return minimumDataTermAccepted;
  }
  /**
   * Output only. Whether the customer accepted data use terms.
   * @param minimumDataTermAccepted minimumDataTermAccepted or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfig setMinimumDataTermAccepted(java.lang.Boolean minimumDataTermAccepted) {
    this.minimumDataTermAccepted = minimumDataTermAccepted;
    return this;
  }
  /**
   * Immutable. The full resource name of the widget config. Format: `projects/{project}/locations/{
   * location}/collections/{collection_id}/dataStores/{data_store_id}/widgetConfigs/{widget_config_i
   * d}`. This field must be a UTF-8 encoded string with a length limit of 1024 characters.
   * @return value or {@code null} for none
   */
  public java.lang.String getName() {
    return name;
  }
  /**
   * Immutable. The full resource name of the widget config. Format: `projects/{project}/locations/{
   * location}/collections/{collection_id}/dataStores/{data_store_id}/widgetConfigs/{widget_config_i
   * d}`. This field must be a UTF-8 encoded string with a length limit of 1024 characters.
   * @param name name or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfig setName(java.lang.String name) {
    this.name = name;
    return this;
  }
  /**
   * The type of snippet to display in UCS widget. - RESULT_DISPLAY_TYPE_UNSPECIFIED for existing
   * users. - SNIPPET for new non-enterprise search users. - EXTRACTIVE_ANSWER for new enterprise
   * search users.
   * @return value or {@code null} for none
   */
  public java.lang.String getResultDisplayType() {
    return resultDisplayType;
  }
  /**
   * The type of snippet to display in UCS widget. - RESULT_DISPLAY_TYPE_UNSPECIFIED for existing
   * users. - SNIPPET for new non-enterprise search users. - EXTRACTIVE_ANSWER for new enterprise
   * search users.
   * @param resultDisplayType resultDisplayType or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfig setResultDisplayType(java.lang.String resultDisplayType) {
    this.resultDisplayType = resultDisplayType;
    return this;
  }
  /**
   * Required. Immutable. Specifies the solution type that this WidgetConfig can be used for.
   * @return value or {@code null} for none
   */
  public java.lang.String getSolutionType() {
    return solutionType;
  }
  /**
   * Required. Immutable. Specifies the solution type that this WidgetConfig can be used for.
   * @param solutionType solutionType or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfig setSolutionType(java.lang.String solutionType) {
    this.solutionType = solutionType;
    return this;
  }
  /**
   * Describes search widget UI branding settings, such as the widget title, logo, favicons, and
   * colors.
   * @return value or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfigUiBrandingSettings getUiBranding() {
    return uiBranding;
  }
  /**
   * Describes search widget UI branding settings, such as the widget title, logo, favicons, and
   * colors.
   * @param uiBranding uiBranding or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfig setUiBranding(GoogleCloudDiscoveryengineV1alphaWidgetConfigUiBrandingSettings uiBranding) {
    this.uiBranding = uiBranding;
    return this;
  }
  /**
   * Describes general widget search settings as seen in cloud console widget configuration page.
   * Replaces deprecated top-level properties.
   * @return value or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfigUiSettings getUiSettings() {
    return uiSettings;
  }
  /**
   * Describes general widget search settings as seen in cloud console widget configuration page.
   * Replaces deprecated top-level properties.
   * @param uiSettings uiSettings or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfig setUiSettings(GoogleCloudDiscoveryengineV1alphaWidgetConfigUiSettings uiSettings) {
    this.uiSettings = uiSettings;
    return this;
  }
  /**
   * Output only. Timestamp the WidgetConfig was updated.
   * @return value or {@code null} for none
   */
  public String getUpdateTime() {
    return updateTime;
  }
  /**
   * Output only. Timestamp the WidgetConfig was updated.
   * @param updateTime updateTime or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaWidgetConfig setUpdateTime(String updateTime) {
    this.updateTime = updateTime;
    return this;
  }
  // Narrows the GenericJson return type so fluent calls keep the concrete type.
  @Override
  public GoogleCloudDiscoveryengineV1alphaWidgetConfig set(String fieldName, Object value) {
    return (GoogleCloudDiscoveryengineV1alphaWidgetConfig) super.set(fieldName, value);
  }
  // Narrows the GenericJson return type of clone() to the concrete type.
  @Override
  public GoogleCloudDiscoveryengineV1alphaWidgetConfig clone() {
    return (GoogleCloudDiscoveryengineV1alphaWidgetConfig) super.clone();
  }
}
|
googleapis/google-cloud-java | 34,924 | java-deploy/proto-google-cloud-deploy-v1/src/main/java/com/google/cloud/deploy/v1/RetryAttempt.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/deploy/v1/cloud_deploy.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.deploy.v1;
/**
*
*
* <pre>
* RetryAttempt represents an action of retrying the failed Cloud Deploy job.
* </pre>
*
* Protobuf type {@code google.cloud.deploy.v1.RetryAttempt}
*/
public final class RetryAttempt extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.deploy.v1.RetryAttempt)
RetryAttemptOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use RetryAttempt.newBuilder() to construct.
  private RetryAttempt(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Initializes scalar fields to their proto3 defaults (enum number 0, empty string).
  private RetryAttempt() {
    state_ = 0;
    stateDesc_ = "";
  }
  // Protobuf runtime hook used to create instances reflectively.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new RetryAttempt();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.deploy.v1.CloudDeployProto
        .internal_static_google_cloud_deploy_v1_RetryAttempt_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.deploy.v1.CloudDeployProto
        .internal_static_google_cloud_deploy_v1_RetryAttempt_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.deploy.v1.RetryAttempt.class,
            com.google.cloud.deploy.v1.RetryAttempt.Builder.class);
  }
  // Bit 0 tracks presence of the optional `wait` message field (see hasWait()).
  private int bitField0_;
  public static final int ATTEMPT_FIELD_NUMBER = 1;
  private long attempt_ = 0L;
  /**
   *
   *
   * <pre>
   * Output only. The index of this retry attempt.
   * </pre>
   *
   * <code>int64 attempt = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   *
   * @return The attempt.
   */
  @java.lang.Override
  public long getAttempt() {
    return attempt_;
  }

  public static final int WAIT_FIELD_NUMBER = 2;
  private com.google.protobuf.Duration wait_;
  /**
   *
   *
   * <pre>
   * Output only. How long the operation will be paused.
   * </pre>
   *
   * <code>.google.protobuf.Duration wait = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   *
   * @return Whether the wait field is set.
   */
  @java.lang.Override
  public boolean hasWait() {
    // Presence of the message field is recorded in bit 0 of bitField0_.
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Output only. How long the operation will be paused.
   * </pre>
   *
   * <code>.google.protobuf.Duration wait = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   *
   * @return The wait.
   */
  @java.lang.Override
  public com.google.protobuf.Duration getWait() {
    // Unset message fields return the default (empty) instance, never null.
    return wait_ == null ? com.google.protobuf.Duration.getDefaultInstance() : wait_;
  }
  /**
   *
   *
   * <pre>
   * Output only. How long the operation will be paused.
   * </pre>
   *
   * <code>.google.protobuf.Duration wait = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   */
  @java.lang.Override
  public com.google.protobuf.DurationOrBuilder getWaitOrBuilder() {
    return wait_ == null ? com.google.protobuf.Duration.getDefaultInstance() : wait_;
  }
  public static final int STATE_FIELD_NUMBER = 5;
  private int state_ = 0;
  /**
   *
   *
   * <pre>
   * Output only. Valid state of this retry action.
   * </pre>
   *
   * <code>
   * .google.cloud.deploy.v1.RepairState state = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   *
   * @return The enum numeric value on the wire for state.
   */
  @java.lang.Override
  public int getStateValue() {
    return state_;
  }
  /**
   *
   *
   * <pre>
   * Output only. Valid state of this retry action.
   * </pre>
   *
   * <code>
   * .google.cloud.deploy.v1.RepairState state = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   *
   * @return The state.
   */
  @java.lang.Override
  public com.google.cloud.deploy.v1.RepairState getState() {
    // Wire values not known to this generated enum map to UNRECOGNIZED.
    com.google.cloud.deploy.v1.RepairState result =
        com.google.cloud.deploy.v1.RepairState.forNumber(state_);
    return result == null ? com.google.cloud.deploy.v1.RepairState.UNRECOGNIZED : result;
  }

  public static final int STATE_DESC_FIELD_NUMBER = 6;

  @SuppressWarnings("serial")
  private volatile java.lang.Object stateDesc_ = "";
  /**
   *
   *
   * <pre>
   * Output only. Description of the state of the Retry.
   * </pre>
   *
   * <code>string state_desc = 6 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   *
   * @return The stateDesc.
   */
  @java.lang.Override
  public java.lang.String getStateDesc() {
    java.lang.Object ref = stateDesc_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Lazily decode the ByteString once and cache the String in the same field.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      stateDesc_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Output only. Description of the state of the Retry.
   * </pre>
   *
   * <code>string state_desc = 6 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   *
   * @return The bytes for stateDesc.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getStateDescBytes() {
    java.lang.Object ref = stateDesc_;
    if (ref instanceof java.lang.String) {
      // Lazily encode the String once and cache the ByteString in the same field.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      stateDesc_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized initialization check: -1 unknown, 0 not initialized, 1 initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes set fields in ascending field-number order:
  // attempt (1), wait (2), state (5), state_desc (6).
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (attempt_ != 0L) {
      output.writeInt64(1, attempt_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(2, getWait());
    }
    if (state_ != com.google.cloud.deploy.v1.RepairState.REPAIR_STATE_UNSPECIFIED.getNumber()) {
      output.writeEnum(5, state_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(stateDesc_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 6, stateDesc_);
    }
    getUnknownFields().writeTo(output);
  }

  // Computes (and memoizes in memoizedSize) the serialized byte size, mirroring writeTo.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (attempt_ != 0L) {
      size += com.google.protobuf.CodedOutputStream.computeInt64Size(1, attempt_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getWait());
    }
    if (state_ != com.google.cloud.deploy.v1.RepairState.REPAIR_STATE_UNSPECIFIED.getNumber()) {
      size += com.google.protobuf.CodedOutputStream.computeEnumSize(5, state_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(stateDesc_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, stateDesc_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-by-field structural equality, including unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.deploy.v1.RetryAttempt)) {
      return super.equals(obj);
    }
    com.google.cloud.deploy.v1.RetryAttempt other = (com.google.cloud.deploy.v1.RetryAttempt) obj;

    if (getAttempt() != other.getAttempt()) return false;
    if (hasWait() != other.hasWait()) return false;
    if (hasWait()) {
      // Only compare wait values when both messages have the field set.
      if (!getWait().equals(other.getWait())) return false;
    }
    if (state_ != other.state_) return false;
    if (!getStateDesc().equals(other.getStateDesc())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Hash is seeded from the descriptor, folds each field tagged by its
  // field number, and is memoized in memoizedHashCode (0 means "not computed").
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + ATTEMPT_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getAttempt());
    if (hasWait()) {
      hash = (37 * hash) + WAIT_FIELD_NUMBER;
      hash = (53 * hash) + getWait().hashCode();
    }
    hash = (37 * hash) + STATE_FIELD_NUMBER;
    hash = (53 * hash) + state_;
    hash = (37 * hash) + STATE_DESC_FIELD_NUMBER;
    hash = (53 * hash) + getStateDesc().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
// Standard generated parse entry points: byte-oriented overloads (ByteBuffer,
// ByteString, byte[]) throw InvalidProtocolBufferException on malformed input;
// stream overloads wrap IO via GeneratedMessageV3 helpers; the *Delimited*
// variants read a varint length prefix before the message payload.
public static com.google.cloud.deploy.v1.RetryAttempt parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.deploy.v1.RetryAttempt parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.deploy.v1.RetryAttempt parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.deploy.v1.RetryAttempt parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.deploy.v1.RetryAttempt parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.deploy.v1.RetryAttempt parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.deploy.v1.RetryAttempt parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.deploy.v1.RetryAttempt parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.deploy.v1.RetryAttempt parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.deploy.v1.RetryAttempt parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.deploy.v1.RetryAttempt parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.deploy.v1.RetryAttempt parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Builder factories. toBuilder() avoids an unnecessary mergeFrom when called on
// the shared default instance.
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.deploy.v1.RetryAttempt prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* RetryAttempt represents an action of retrying the failed Cloud Deploy job.
* </pre>
*
* Protobuf type {@code google.cloud.deploy.v1.RetryAttempt}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.deploy.v1.RetryAttempt)
com.google.cloud.deploy.v1.RetryAttemptOrBuilder {
// Descriptor plumbing plus builder construction. Nested-message field builders
// are eagerly initialized only when alwaysUseFieldBuilders is on (used when a
// parent builder needs change notifications from children).
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.deploy.v1.CloudDeployProto
.internal_static_google_cloud_deploy_v1_RetryAttempt_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.deploy.v1.CloudDeployProto
.internal_static_google_cloud_deploy_v1_RetryAttempt_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.deploy.v1.RetryAttempt.class,
com.google.cloud.deploy.v1.RetryAttempt.Builder.class);
}
// Construct using com.google.cloud.deploy.v1.RetryAttempt.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getWaitFieldBuilder();
}
}
@java.lang.Override
// Resets every field to its default and clears all presence bits.
public Builder clear() {
super.clear();
bitField0_ = 0;
attempt_ = 0L;
wait_ = null;
if (waitBuilder_ != null) {
waitBuilder_.dispose();
waitBuilder_ = null;
}
state_ = 0;
stateDesc_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.deploy.v1.CloudDeployProto
.internal_static_google_cloud_deploy_v1_RetryAttempt_descriptor;
}
@java.lang.Override
public com.google.cloud.deploy.v1.RetryAttempt getDefaultInstanceForType() {
return com.google.cloud.deploy.v1.RetryAttempt.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.deploy.v1.RetryAttempt build() {
com.google.cloud.deploy.v1.RetryAttempt result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.deploy.v1.RetryAttempt buildPartial() {
com.google.cloud.deploy.v1.RetryAttempt result =
new com.google.cloud.deploy.v1.RetryAttempt(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Copies only the fields whose builder-side presence bit is set into the built
// message; the message's own bitField0_ tracks just the wait sub-message.
private void buildPartial0(com.google.cloud.deploy.v1.RetryAttempt result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.attempt_ = attempt_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.wait_ = waitBuilder_ == null ? wait_ : waitBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.state_ = state_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.stateDesc_ = stateDesc_;
}
result.bitField0_ |= to_bitField0_;
}
// Reflective-mutation overrides simply delegate to GeneratedMessageV3.Builder;
// they are regenerated here so the return type is this concrete Builder.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.deploy.v1.RetryAttempt) {
return mergeFrom((com.google.cloud.deploy.v1.RetryAttempt) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Typed merge: scalar fields are copied only when non-default on `other`;
// the wait sub-message is merged recursively via mergeWait.
public Builder mergeFrom(com.google.cloud.deploy.v1.RetryAttempt other) {
if (other == com.google.cloud.deploy.v1.RetryAttempt.getDefaultInstance()) return this;
if (other.getAttempt() != 0L) {
setAttempt(other.getAttempt());
}
if (other.hasWait()) {
mergeWait(other.getWait());
}
if (other.state_ != 0) {
setStateValue(other.getStateValue());
}
if (!other.getStateDesc().isEmpty()) {
stateDesc_ = other.stateDesc_;
bitField0_ |= 0x00000008;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
// Wire-format parse loop: each case label is the field's tag
// (field_number << 3 | wire_type), e.g. 8 = attempt, 18 = wait, 40 = state,
// 50 = state_desc. Unrecognized tags are preserved as unknown fields;
// onChanged() runs even on parse failure so partial state propagates.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8:
{
attempt_ = input.readInt64();
bitField0_ |= 0x00000001;
break;
} // case 8
case 18:
{
input.readMessage(getWaitFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
case 40:
{
state_ = input.readEnum();
bitField0_ |= 0x00000004;
break;
} // case 40
case 50:
{
stateDesc_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000008;
break;
} // case 50
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
// attempt (field 1): presence bit 0x00000001.
private long attempt_;
/**
 *
 *
 * <pre>
 * Output only. The index of this retry attempt.
 * </pre>
 *
 * <code>int64 attempt = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 *
 * @return The attempt.
 */
@java.lang.Override
public long getAttempt() {
return attempt_;
}
/**
 *
 *
 * <pre>
 * Output only. The index of this retry attempt.
 * </pre>
 *
 * <code>int64 attempt = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 *
 * @param value The attempt to set.
 * @return This builder for chaining.
 */
public Builder setAttempt(long value) {
attempt_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Output only. The index of this retry attempt.
 * </pre>
 *
 * <code>int64 attempt = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 *
 * @return This builder for chaining.
 */
public Builder clearAttempt() {
bitField0_ = (bitField0_ & ~0x00000001);
attempt_ = 0L;
onChanged();
return this;
}
// wait (field 2): presence bit 0x00000002. Either wait_ (plain message) or
// waitBuilder_ (lazy single-field builder) holds the value, never both.
private com.google.protobuf.Duration wait_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Duration,
com.google.protobuf.Duration.Builder,
com.google.protobuf.DurationOrBuilder>
waitBuilder_;
/**
 *
 *
 * <pre>
 * Output only. How long the operation will be paused.
 * </pre>
 *
 * <code>.google.protobuf.Duration wait = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 *
 * @return Whether the wait field is set.
 */
public boolean hasWait() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
 *
 *
 * <pre>
 * Output only. How long the operation will be paused.
 * </pre>
 *
 * <code>.google.protobuf.Duration wait = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 *
 * @return The wait.
 */
public com.google.protobuf.Duration getWait() {
if (waitBuilder_ == null) {
return wait_ == null ? com.google.protobuf.Duration.getDefaultInstance() : wait_;
} else {
return waitBuilder_.getMessage();
}
}
/**
 *
 *
 * <pre>
 * Output only. How long the operation will be paused.
 * </pre>
 *
 * <code>.google.protobuf.Duration wait = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 */
public Builder setWait(com.google.protobuf.Duration value) {
if (waitBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
wait_ = value;
} else {
waitBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Output only. How long the operation will be paused.
 * </pre>
 *
 * <code>.google.protobuf.Duration wait = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 */
public Builder setWait(com.google.protobuf.Duration.Builder builderForValue) {
if (waitBuilder_ == null) {
wait_ = builderForValue.build();
} else {
waitBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Output only. How long the operation will be paused.
 * </pre>
 *
 * <code>.google.protobuf.Duration wait = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 */
// Merges into an existing non-default value; otherwise replaces outright.
public Builder mergeWait(com.google.protobuf.Duration value) {
if (waitBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& wait_ != null
&& wait_ != com.google.protobuf.Duration.getDefaultInstance()) {
getWaitBuilder().mergeFrom(value);
} else {
wait_ = value;
}
} else {
waitBuilder_.mergeFrom(value);
}
if (wait_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
 *
 *
 * <pre>
 * Output only. How long the operation will be paused.
 * </pre>
 *
 * <code>.google.protobuf.Duration wait = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 */
public Builder clearWait() {
bitField0_ = (bitField0_ & ~0x00000002);
wait_ = null;
if (waitBuilder_ != null) {
waitBuilder_.dispose();
waitBuilder_ = null;
}
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Output only. How long the operation will be paused.
 * </pre>
 *
 * <code>.google.protobuf.Duration wait = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 */
public com.google.protobuf.Duration.Builder getWaitBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getWaitFieldBuilder().getBuilder();
}
/**
 *
 *
 * <pre>
 * Output only. How long the operation will be paused.
 * </pre>
 *
 * <code>.google.protobuf.Duration wait = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 */
public com.google.protobuf.DurationOrBuilder getWaitOrBuilder() {
if (waitBuilder_ != null) {
return waitBuilder_.getMessageOrBuilder();
} else {
return wait_ == null ? com.google.protobuf.Duration.getDefaultInstance() : wait_;
}
}
/**
 *
 *
 * <pre>
 * Output only. How long the operation will be paused.
 * </pre>
 *
 * <code>.google.protobuf.Duration wait = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 */
// Lazily creates the field builder; ownership of wait_ transfers into it.
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Duration,
com.google.protobuf.Duration.Builder,
com.google.protobuf.DurationOrBuilder>
getWaitFieldBuilder() {
if (waitBuilder_ == null) {
waitBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Duration,
com.google.protobuf.Duration.Builder,
com.google.protobuf.DurationOrBuilder>(
getWait(), getParentForChildren(), isClean());
wait_ = null;
}
return waitBuilder_;
}
// state (field 5): stored as the raw wire enum number; presence bit 0x00000004.
private int state_ = 0;
/**
 *
 *
 * <pre>
 * Output only. Valid state of this retry action.
 * </pre>
 *
 * <code>
 * .google.cloud.deploy.v1.RepairState state = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 *
 * @return The enum numeric value on the wire for state.
 */
@java.lang.Override
public int getStateValue() {
return state_;
}
/**
 *
 *
 * <pre>
 * Output only. Valid state of this retry action.
 * </pre>
 *
 * <code>
 * .google.cloud.deploy.v1.RepairState state = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 *
 * @param value The enum numeric value on the wire for state to set.
 * @return This builder for chaining.
 */
public Builder setStateValue(int value) {
state_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Output only. Valid state of this retry action.
 * </pre>
 *
 * <code>
 * .google.cloud.deploy.v1.RepairState state = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 *
 * @return The state.
 */
@java.lang.Override
public com.google.cloud.deploy.v1.RepairState getState() {
com.google.cloud.deploy.v1.RepairState result =
com.google.cloud.deploy.v1.RepairState.forNumber(state_);
return result == null ? com.google.cloud.deploy.v1.RepairState.UNRECOGNIZED : result;
}
/**
 *
 *
 * <pre>
 * Output only. Valid state of this retry action.
 * </pre>
 *
 * <code>
 * .google.cloud.deploy.v1.RepairState state = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 *
 * @param value The state to set.
 * @return This builder for chaining.
 */
public Builder setState(com.google.cloud.deploy.v1.RepairState value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
state_ = value.getNumber();
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Output only. Valid state of this retry action.
 * </pre>
 *
 * <code>
 * .google.cloud.deploy.v1.RepairState state = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 *
 * @return This builder for chaining.
 */
public Builder clearState() {
bitField0_ = (bitField0_ & ~0x00000004);
state_ = 0;
onChanged();
return this;
}
// state_desc (field 6): presence bit 0x00000008. Holds either a String or a
// ByteString; getters lazily convert and cache (standard protobuf string field).
private java.lang.Object stateDesc_ = "";
/**
 *
 *
 * <pre>
 * Output only. Description of the state of the Retry.
 * </pre>
 *
 * <code>string state_desc = 6 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 *
 * @return The stateDesc.
 */
public java.lang.String getStateDesc() {
java.lang.Object ref = stateDesc_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
stateDesc_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 *
 *
 * <pre>
 * Output only. Description of the state of the Retry.
 * </pre>
 *
 * <code>string state_desc = 6 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 *
 * @return The bytes for stateDesc.
 */
public com.google.protobuf.ByteString getStateDescBytes() {
java.lang.Object ref = stateDesc_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
stateDesc_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 *
 *
 * <pre>
 * Output only. Description of the state of the Retry.
 * </pre>
 *
 * <code>string state_desc = 6 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 *
 * @param value The stateDesc to set.
 * @return This builder for chaining.
 */
public Builder setStateDesc(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
stateDesc_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Output only. Description of the state of the Retry.
 * </pre>
 *
 * <code>string state_desc = 6 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 *
 * @return This builder for chaining.
 */
public Builder clearStateDesc() {
stateDesc_ = getDefaultInstance().getStateDesc();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Output only. Description of the state of the Retry.
 * </pre>
 *
 * <code>string state_desc = 6 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 *
 * @param value The bytes for stateDesc to set.
 * @return This builder for chaining.
 */
public Builder setStateDescBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
stateDesc_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
// Unknown-field handling delegates to the base builder (covariant return only).
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.deploy.v1.RetryAttempt)
}
// @@protoc_insertion_point(class_scope:google.cloud.deploy.v1.RetryAttempt)
// Shared immutable default instance plus the message parser. parsePartialFrom
// attaches the partially-built message to any parse exception so callers can
// inspect what was decoded before the failure.
private static final com.google.cloud.deploy.v1.RetryAttempt DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.deploy.v1.RetryAttempt();
}
public static com.google.cloud.deploy.v1.RetryAttempt getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<RetryAttempt> PARSER =
new com.google.protobuf.AbstractParser<RetryAttempt>() {
@java.lang.Override
public RetryAttempt parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<RetryAttempt> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<RetryAttempt> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.deploy.v1.RetryAttempt getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// ---- File boundary (concatenation artifact) ----
// Source of the following content: googleapis/google-cloud-java, file
// java-monitoring/proto-google-cloud-monitoring-v3/src/main/java/com/google/monitoring/v3/ListSnoozesResponse.java
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/monitoring/v3/snooze_service.proto
// Protobuf Java Version: 3.25.8
package com.google.monitoring.v3;
/**
*
*
* <pre>
* The results of a successful `ListSnoozes` call, containing the matching
* `Snooze`s.
* </pre>
*
* Protobuf type {@code google.monitoring.v3.ListSnoozesResponse}
*/
public final class ListSnoozesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.monitoring.v3.ListSnoozesResponse)
ListSnoozesResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListSnoozesResponse.newBuilder() to construct.
private ListSnoozesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// No-arg ctor used only for the default instance; fields get empty defaults.
private ListSnoozesResponse() {
snoozes_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListSnoozesResponse();
}
// Descriptor plumbing tying this class to the snooze_service.proto schema.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.monitoring.v3.SnoozeServiceProto
.internal_static_google_monitoring_v3_ListSnoozesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.monitoring.v3.SnoozeServiceProto
.internal_static_google_monitoring_v3_ListSnoozesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.monitoring.v3.ListSnoozesResponse.class,
com.google.monitoring.v3.ListSnoozesResponse.Builder.class);
}
public static final int SNOOZES_FIELD_NUMBER = 1;
// snoozes (repeated field 1): immutable once the message is built, so the
// accessors below can return the list directly without copying.
@SuppressWarnings("serial")
private java.util.List<com.google.monitoring.v3.Snooze> snoozes_;
/**
 *
 *
 * <pre>
 * `Snooze`s matching this list call.
 * </pre>
 *
 * <code>repeated .google.monitoring.v3.Snooze snoozes = 1;</code>
 */
@java.lang.Override
public java.util.List<com.google.monitoring.v3.Snooze> getSnoozesList() {
return snoozes_;
}
/**
 *
 *
 * <pre>
 * `Snooze`s matching this list call.
 * </pre>
 *
 * <code>repeated .google.monitoring.v3.Snooze snoozes = 1;</code>
 */
@java.lang.Override
public java.util.List<? extends com.google.monitoring.v3.SnoozeOrBuilder>
getSnoozesOrBuilderList() {
return snoozes_;
}
/**
 *
 *
 * <pre>
 * `Snooze`s matching this list call.
 * </pre>
 *
 * <code>repeated .google.monitoring.v3.Snooze snoozes = 1;</code>
 */
@java.lang.Override
public int getSnoozesCount() {
return snoozes_.size();
}
/**
 *
 *
 * <pre>
 * `Snooze`s matching this list call.
 * </pre>
 *
 * <code>repeated .google.monitoring.v3.Snooze snoozes = 1;</code>
 */
@java.lang.Override
public com.google.monitoring.v3.Snooze getSnoozes(int index) {
return snoozes_.get(index);
}
/**
 *
 *
 * <pre>
 * `Snooze`s matching this list call.
 * </pre>
 *
 * <code>repeated .google.monitoring.v3.Snooze snoozes = 1;</code>
 */
@java.lang.Override
public com.google.monitoring.v3.SnoozeOrBuilder getSnoozesOrBuilder(int index) {
return snoozes_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
// next_page_token (field 2): String-or-ByteString holder with lazy conversion
// caching, the standard generated pattern for protobuf string fields.
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
 *
 *
 * <pre>
 * Page token for repeated calls to `ListSnoozes`, to fetch additional pages
 * of results. If this is empty or missing, there are no more pages.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The nextPageToken.
 */
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
 *
 *
 * <pre>
 * Page token for repeated calls to `ListSnoozes`, to fetch additional pages
 * of results. If this is empty or missing, there are no more pages.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The bytes for nextPageToken.
 */
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Tri-state cache: -1 unknown, 0 false, 1 true. No required fields exist in
// this message, so the first call always settles on true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
// Serializes each snooze (field 1), the page token (field 2, skipped when
// empty), then any unknown fields.
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < snoozes_.size(); i++) {
output.writeMessage(1, snoozes_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
// Memoized wire size; mirrors writeTo() field for field.
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < snoozes_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, snoozes_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
// Field-by-field equality including unknown fields.
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.monitoring.v3.ListSnoozesResponse)) {
return super.equals(obj);
}
com.google.monitoring.v3.ListSnoozesResponse other =
(com.google.monitoring.v3.ListSnoozesResponse) obj;
if (!getSnoozesList().equals(other.getSnoozesList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
// Memoized hash consistent with equals(); the repeated field contributes only
// when non-empty.
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getSnoozesCount() > 0) {
hash = (37 * hash) + SNOOZES_FIELD_NUMBER;
hash = (53 * hash) + getSnoozesList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parse entry points; byte-oriented overloads throw
// InvalidProtocolBufferException, stream overloads surface IOException, and the
// *Delimited* variants read a varint length prefix before the payload.
public static com.google.monitoring.v3.ListSnoozesResponse parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.monitoring.v3.ListSnoozesResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.monitoring.v3.ListSnoozesResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.monitoring.v3.ListSnoozesResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.monitoring.v3.ListSnoozesResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.monitoring.v3.ListSnoozesResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.monitoring.v3.ListSnoozesResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.monitoring.v3.ListSnoozesResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.monitoring.v3.ListSnoozesResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.monitoring.v3.ListSnoozesResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.monitoring.v3.ListSnoozesResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.monitoring.v3.ListSnoozesResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Builder factories; toBuilder() skips the merge when called on the shared
// default instance.
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.monitoring.v3.ListSnoozesResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
  /**
   *
   *
   * <pre>
   * The results of a successful `ListSnoozes` call, containing the matching
   * `Snooze`s.
   * </pre>
   *
   * Protobuf type {@code google.monitoring.v3.ListSnoozesResponse}
   */
  // Mutable builder. Field presence is tracked in bitField0_:
  //   0x00000001 -> the snoozes list is a private mutable copy
  //   0x00000002 -> next_page_token has been set
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.monitoring.v3.ListSnoozesResponse)
      com.google.monitoring.v3.ListSnoozesResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.monitoring.v3.SnoozeServiceProto
          .internal_static_google_monitoring_v3_ListSnoozesResponse_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.monitoring.v3.SnoozeServiceProto
          .internal_static_google_monitoring_v3_ListSnoozesResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.monitoring.v3.ListSnoozesResponse.class,
              com.google.monitoring.v3.ListSnoozesResponse.Builder.class);
    }

    // Construct using com.google.monitoring.v3.ListSnoozesResponse.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    /** Resets all fields to their defaults and clears presence bits. */
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (snoozesBuilder_ == null) {
        snoozes_ = java.util.Collections.emptyList();
      } else {
        // Builder mode: the list is owned by snoozesBuilder_; drop our reference.
        snoozes_ = null;
        snoozesBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.monitoring.v3.SnoozeServiceProto
          .internal_static_google_monitoring_v3_ListSnoozesResponse_descriptor;
    }

    @java.lang.Override
    public com.google.monitoring.v3.ListSnoozesResponse getDefaultInstanceForType() {
      return com.google.monitoring.v3.ListSnoozesResponse.getDefaultInstance();
    }

    /** Builds the message, throwing if required fields are missing (none here). */
    @java.lang.Override
    public com.google.monitoring.v3.ListSnoozesResponse build() {
      com.google.monitoring.v3.ListSnoozesResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    /** Builds the message without checking initialization. */
    @java.lang.Override
    public com.google.monitoring.v3.ListSnoozesResponse buildPartial() {
      com.google.monitoring.v3.ListSnoozesResponse result =
          new com.google.monitoring.v3.ListSnoozesResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies the snoozes list into the result; after freezing, this builder no
    // longer owns the list (bit 0x1 is cleared so later mutation re-copies it).
    private void buildPartialRepeatedFields(com.google.monitoring.v3.ListSnoozesResponse result) {
      if (snoozesBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          snoozes_ = java.util.Collections.unmodifiableList(snoozes_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.snoozes_ = snoozes_;
      } else {
        result.snoozes_ = snoozesBuilder_.build();
      }
    }

    // Copies scalar fields that have their presence bit set.
    private void buildPartial0(com.google.monitoring.v3.ListSnoozesResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    /** Type-dispatching merge; falls back to reflective merge for other message types. */
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.monitoring.v3.ListSnoozesResponse) {
        return mergeFrom((com.google.monitoring.v3.ListSnoozesResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    /** Field-wise merge: appends other's snoozes, replaces next_page_token if non-empty. */
    public Builder mergeFrom(com.google.monitoring.v3.ListSnoozesResponse other) {
      if (other == com.google.monitoring.v3.ListSnoozesResponse.getDefaultInstance()) return this;
      if (snoozesBuilder_ == null) {
        if (!other.snoozes_.isEmpty()) {
          if (snoozes_.isEmpty()) {
            // Share other's (immutable) list until a mutation forces a copy.
            snoozes_ = other.snoozes_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureSnoozesIsMutable();
            snoozes_.addAll(other.snoozes_);
          }
          onChanged();
        }
      } else {
        if (!other.snoozes_.isEmpty()) {
          if (snoozesBuilder_.isEmpty()) {
            // Discard the empty field builder and adopt other's list directly.
            snoozesBuilder_.dispose();
            snoozesBuilder_ = null;
            snoozes_ = other.snoozes_;
            bitField0_ = (bitField0_ & ~0x00000001);
            snoozesBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getSnoozesFieldBuilder()
                    : null;
          } else {
            snoozesBuilder_.addAllMessages(other.snoozes_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      // No required fields in this message.
      return true;
    }

    /** Wire-format parse loop; tag 10 = snoozes (message), tag 18 = next_page_token (string). */
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.monitoring.v3.Snooze m =
                    input.readMessage(com.google.monitoring.v3.Snooze.parser(), extensionRegistry);
                if (snoozesBuilder_ == null) {
                  ensureSnoozesIsMutable();
                  snoozes_.add(m);
                } else {
                  snoozesBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Presence/ownership bits; see class comment.
    private int bitField0_;

    // Repeated field `snoozes = 1`; immutable until ensureSnoozesIsMutable() copies it.
    private java.util.List<com.google.monitoring.v3.Snooze> snoozes_ =
        java.util.Collections.emptyList();

    // Copy-on-write: replaces a shared/immutable list with a private ArrayList.
    private void ensureSnoozesIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        snoozes_ = new java.util.ArrayList<com.google.monitoring.v3.Snooze>(snoozes_);
        bitField0_ |= 0x00000001;
      }
    }

    // Lazily-created field builder; once non-null it owns the list instead of snoozes_.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.monitoring.v3.Snooze,
            com.google.monitoring.v3.Snooze.Builder,
            com.google.monitoring.v3.SnoozeOrBuilder>
        snoozesBuilder_;

    /**
     *
     *
     * <pre>
     * `Snooze`s matching this list call.
     * </pre>
     *
     * <code>repeated .google.monitoring.v3.Snooze snoozes = 1;</code>
     */
    public java.util.List<com.google.monitoring.v3.Snooze> getSnoozesList() {
      if (snoozesBuilder_ == null) {
        return java.util.Collections.unmodifiableList(snoozes_);
      } else {
        return snoozesBuilder_.getMessageList();
      }
    }

    /**
     *
     *
     * <pre>
     * `Snooze`s matching this list call.
     * </pre>
     *
     * <code>repeated .google.monitoring.v3.Snooze snoozes = 1;</code>
     */
    public int getSnoozesCount() {
      if (snoozesBuilder_ == null) {
        return snoozes_.size();
      } else {
        return snoozesBuilder_.getCount();
      }
    }

    /**
     *
     *
     * <pre>
     * `Snooze`s matching this list call.
     * </pre>
     *
     * <code>repeated .google.monitoring.v3.Snooze snoozes = 1;</code>
     */
    public com.google.monitoring.v3.Snooze getSnoozes(int index) {
      if (snoozesBuilder_ == null) {
        return snoozes_.get(index);
      } else {
        return snoozesBuilder_.getMessage(index);
      }
    }

    /**
     *
     *
     * <pre>
     * `Snooze`s matching this list call.
     * </pre>
     *
     * <code>repeated .google.monitoring.v3.Snooze snoozes = 1;</code>
     */
    public Builder setSnoozes(int index, com.google.monitoring.v3.Snooze value) {
      if (snoozesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureSnoozesIsMutable();
        snoozes_.set(index, value);
        onChanged();
      } else {
        snoozesBuilder_.setMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * `Snooze`s matching this list call.
     * </pre>
     *
     * <code>repeated .google.monitoring.v3.Snooze snoozes = 1;</code>
     */
    public Builder setSnoozes(int index, com.google.monitoring.v3.Snooze.Builder builderForValue) {
      if (snoozesBuilder_ == null) {
        ensureSnoozesIsMutable();
        snoozes_.set(index, builderForValue.build());
        onChanged();
      } else {
        snoozesBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * `Snooze`s matching this list call.
     * </pre>
     *
     * <code>repeated .google.monitoring.v3.Snooze snoozes = 1;</code>
     */
    public Builder addSnoozes(com.google.monitoring.v3.Snooze value) {
      if (snoozesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureSnoozesIsMutable();
        snoozes_.add(value);
        onChanged();
      } else {
        snoozesBuilder_.addMessage(value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * `Snooze`s matching this list call.
     * </pre>
     *
     * <code>repeated .google.monitoring.v3.Snooze snoozes = 1;</code>
     */
    public Builder addSnoozes(int index, com.google.monitoring.v3.Snooze value) {
      if (snoozesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureSnoozesIsMutable();
        snoozes_.add(index, value);
        onChanged();
      } else {
        snoozesBuilder_.addMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * `Snooze`s matching this list call.
     * </pre>
     *
     * <code>repeated .google.monitoring.v3.Snooze snoozes = 1;</code>
     */
    public Builder addSnoozes(com.google.monitoring.v3.Snooze.Builder builderForValue) {
      if (snoozesBuilder_ == null) {
        ensureSnoozesIsMutable();
        snoozes_.add(builderForValue.build());
        onChanged();
      } else {
        snoozesBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * `Snooze`s matching this list call.
     * </pre>
     *
     * <code>repeated .google.monitoring.v3.Snooze snoozes = 1;</code>
     */
    public Builder addSnoozes(int index, com.google.monitoring.v3.Snooze.Builder builderForValue) {
      if (snoozesBuilder_ == null) {
        ensureSnoozesIsMutable();
        snoozes_.add(index, builderForValue.build());
        onChanged();
      } else {
        snoozesBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * `Snooze`s matching this list call.
     * </pre>
     *
     * <code>repeated .google.monitoring.v3.Snooze snoozes = 1;</code>
     */
    public Builder addAllSnoozes(
        java.lang.Iterable<? extends com.google.monitoring.v3.Snooze> values) {
      if (snoozesBuilder_ == null) {
        ensureSnoozesIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, snoozes_);
        onChanged();
      } else {
        snoozesBuilder_.addAllMessages(values);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * `Snooze`s matching this list call.
     * </pre>
     *
     * <code>repeated .google.monitoring.v3.Snooze snoozes = 1;</code>
     */
    public Builder clearSnoozes() {
      if (snoozesBuilder_ == null) {
        snoozes_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        snoozesBuilder_.clear();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * `Snooze`s matching this list call.
     * </pre>
     *
     * <code>repeated .google.monitoring.v3.Snooze snoozes = 1;</code>
     */
    public Builder removeSnoozes(int index) {
      if (snoozesBuilder_ == null) {
        ensureSnoozesIsMutable();
        snoozes_.remove(index);
        onChanged();
      } else {
        snoozesBuilder_.remove(index);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * `Snooze`s matching this list call.
     * </pre>
     *
     * <code>repeated .google.monitoring.v3.Snooze snoozes = 1;</code>
     */
    public com.google.monitoring.v3.Snooze.Builder getSnoozesBuilder(int index) {
      return getSnoozesFieldBuilder().getBuilder(index);
    }

    /**
     *
     *
     * <pre>
     * `Snooze`s matching this list call.
     * </pre>
     *
     * <code>repeated .google.monitoring.v3.Snooze snoozes = 1;</code>
     */
    public com.google.monitoring.v3.SnoozeOrBuilder getSnoozesOrBuilder(int index) {
      if (snoozesBuilder_ == null) {
        return snoozes_.get(index);
      } else {
        return snoozesBuilder_.getMessageOrBuilder(index);
      }
    }

    /**
     *
     *
     * <pre>
     * `Snooze`s matching this list call.
     * </pre>
     *
     * <code>repeated .google.monitoring.v3.Snooze snoozes = 1;</code>
     */
    public java.util.List<? extends com.google.monitoring.v3.SnoozeOrBuilder>
        getSnoozesOrBuilderList() {
      if (snoozesBuilder_ != null) {
        return snoozesBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(snoozes_);
      }
    }

    /**
     *
     *
     * <pre>
     * `Snooze`s matching this list call.
     * </pre>
     *
     * <code>repeated .google.monitoring.v3.Snooze snoozes = 1;</code>
     */
    public com.google.monitoring.v3.Snooze.Builder addSnoozesBuilder() {
      return getSnoozesFieldBuilder()
          .addBuilder(com.google.monitoring.v3.Snooze.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * `Snooze`s matching this list call.
     * </pre>
     *
     * <code>repeated .google.monitoring.v3.Snooze snoozes = 1;</code>
     */
    public com.google.monitoring.v3.Snooze.Builder addSnoozesBuilder(int index) {
      return getSnoozesFieldBuilder()
          .addBuilder(index, com.google.monitoring.v3.Snooze.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * `Snooze`s matching this list call.
     * </pre>
     *
     * <code>repeated .google.monitoring.v3.Snooze snoozes = 1;</code>
     */
    public java.util.List<com.google.monitoring.v3.Snooze.Builder> getSnoozesBuilderList() {
      return getSnoozesFieldBuilder().getBuilderList();
    }

    // Lazily switches the repeated field into builder mode; snoozes_ is nulled
    // afterwards because ownership transfers to the field builder.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.monitoring.v3.Snooze,
            com.google.monitoring.v3.Snooze.Builder,
            com.google.monitoring.v3.SnoozeOrBuilder>
        getSnoozesFieldBuilder() {
      if (snoozesBuilder_ == null) {
        snoozesBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.monitoring.v3.Snooze,
                com.google.monitoring.v3.Snooze.Builder,
                com.google.monitoring.v3.SnoozeOrBuilder>(
                snoozes_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        snoozes_ = null;
      }
      return snoozesBuilder_;
    }

    // Field `next_page_token = 2`; String once decoded, ByteString before decoding.
    private java.lang.Object nextPageToken_ = "";

    /**
     *
     *
     * <pre>
     * Page token for repeated calls to `ListSnoozes`, to fetch additional pages
     * of results. If this is empty or missing, there are no more pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        // Decode the cached ByteString once and memoize the String.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Page token for repeated calls to `ListSnoozes`, to fetch additional pages
     * of results. If this is empty or missing, there are no more pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        // Encode once and memoize the ByteString.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Page token for repeated calls to `ListSnoozes`, to fetch additional pages
     * of results. If this is empty or missing, there are no more pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Page token for repeated calls to `ListSnoozes`, to fetch additional pages
     * of results. If this is empty or missing, there are no more pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Page token for repeated calls to `ListSnoozes`, to fetch additional pages
     * of results. If this is empty or missing, there are no more pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.monitoring.v3.ListSnoozesResponse)
  }
  // @@protoc_insertion_point(class_scope:google.monitoring.v3.ListSnoozesResponse)

  // Shared singleton; also serves as the identity check in toBuilder()/mergeFrom().
  private static final com.google.monitoring.v3.ListSnoozesResponse DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.monitoring.v3.ListSnoozesResponse();
  }

  public static com.google.monitoring.v3.ListSnoozesResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser delegating to Builder.mergeFrom; on failure the partially-built
  // message is attached to the thrown InvalidProtocolBufferException.
  private static final com.google.protobuf.Parser<ListSnoozesResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListSnoozesResponse>() {
        @java.lang.Override
        public ListSnoozesResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures so callers see a protobuf-typed exception.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListSnoozesResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListSnoozesResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.monitoring.v3.ListSnoozesResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/tomee | 35,224 | container/openejb-jee-accessors/src/main/java/org/apache/openejb/jee/FacesApplication$JAXB.java | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openejb.jee;
import java.util.ArrayList;
import java.util.List;
import javax.xml.XMLConstants;
import javax.xml.namespace.QName;
import jakarta.xml.bind.annotation.adapters.CollapsedStringAdapter;
import org.metatype.sxc.jaxb.JAXBObject;
import org.metatype.sxc.jaxb.LifecycleCallback;
import org.metatype.sxc.jaxb.RuntimeContext;
import org.metatype.sxc.util.Attribute;
import org.metatype.sxc.util.XoXMLStreamReader;
import org.metatype.sxc.util.XoXMLStreamWriter;
import static org.apache.openejb.jee.FacesApplicationExtension$JAXB.readFacesApplicationExtension;
import static org.apache.openejb.jee.FacesApplicationExtension$JAXB.writeFacesApplicationExtension;
import static org.apache.openejb.jee.FacesApplicationResourceBundle$JAXB.readFacesApplicationResourceBundle;
import static org.apache.openejb.jee.FacesApplicationResourceBundle$JAXB.writeFacesApplicationResourceBundle;
import static org.apache.openejb.jee.FacesLocaleConfig$JAXB.readFacesLocaleConfig;
import static org.apache.openejb.jee.FacesLocaleConfig$JAXB.writeFacesLocaleConfig;
import static org.apache.openejb.jee.FacesSystemEventListener$JAXB.readFacesSystemEventListener;
import static org.apache.openejb.jee.FacesSystemEventListener$JAXB.writeFacesSystemEventListener;
import static org.apache.openejb.jee.FacesValidator$JAXB.readFacesValidator;
import static org.apache.openejb.jee.FacesValidator$JAXB.writeFacesValidator;
@SuppressWarnings({
"StringEquality"
})
public class FacesApplication$JAXB
extends JAXBObject<FacesApplication>
{
    public FacesApplication$JAXB() {
        // Registers the bound class, its element QName, and every generated
        // accessor this reader/writer delegates to for child elements.
        super(FacesApplication.class, null, new QName("http://java.sun.com/xml/ns/javaee".intern(), "faces-config-applicationType".intern()), FacesSystemEventListener$JAXB.class, FacesLocaleConfig$JAXB.class, FacesApplicationResourceBundle$JAXB.class, FacesApplicationExtension$JAXB.class, FacesValidator$JAXB.class);
    }

    /** Static entry point used by generated readers of enclosing elements. */
    public static FacesApplication readFacesApplication(XoXMLStreamReader reader, RuntimeContext context)
        throws Exception
    {
        return _read(reader, context);
    }

    /** Static entry point used by generated writers of enclosing elements. */
    public static void writeFacesApplication(XoXMLStreamWriter writer, FacesApplication facesApplication, RuntimeContext context)
        throws Exception
    {
        _write(writer, facesApplication, context);
    }

    /** Instance-side write hook required by {@code JAXBObject}; delegates to {@code _write}. */
    public void write(XoXMLStreamWriter writer, FacesApplication facesApplication, RuntimeContext context)
        throws Exception
    {
        _write(writer, facesApplication, context);
    }
public static final FacesApplication _read(XoXMLStreamReader reader, RuntimeContext context)
throws Exception
{
// Check for xsi:nil
if (reader.isXsiNil()) {
return null;
}
if (context == null) {
context = new RuntimeContext();
}
FacesApplication facesApplication = new FacesApplication();
context.beforeUnmarshal(facesApplication, LifecycleCallback.NONE);
List<String> actionListener = null;
List<String> defaultRenderKitId = null;
List<String> messageBundle = null;
List<String> navigationHandler = null;
List<String> viewHandler = null;
List<String> stateManager = null;
List<String> elResolver = null;
List<String> propertyResolver = null;
List<String> variableResolver = null;
List<String> resourceHandler = null;
List<FacesSystemEventListener> systemEventListener = null;
List<FacesLocaleConfig> localeConfig = null;
List<FacesApplicationExtension> applicationExtension = null;
List<FacesValidator> defaultValidators = null;
List<Object> others = null;
// Check xsi:type
QName xsiType = reader.getXsiType();
if (xsiType!= null) {
if (("faces-config-applicationType"!= xsiType.getLocalPart())||("http://java.sun.com/xml/ns/javaee"!= xsiType.getNamespaceURI())) {
return context.unexpectedXsiType(reader, FacesApplication.class);
}
}
// Read attributes
for (Attribute attribute: reader.getAttributes()) {
if (("id" == attribute.getLocalName())&&(("" == attribute.getNamespace())||(attribute.getNamespace() == null))) {
// ATTRIBUTE: id
String id = Adapters.collapsedStringAdapterAdapter.unmarshal(attribute.getValue());
context.addXmlId(reader, id, facesApplication);
facesApplication.id = id;
} else if (XMLConstants.W3C_XML_SCHEMA_INSTANCE_NS_URI!= attribute.getNamespace()) {
context.unexpectedAttribute(attribute, new QName("", "id"));
}
}
// Read elements
for (XoXMLStreamReader elementReader: reader.getChildElements()) {
if (("action-listener" == elementReader.getLocalName())&&("http://java.sun.com/xml/ns/javaee" == elementReader.getNamespaceURI())) {
// ELEMENT: actionListener
String actionListenerItemRaw = elementReader.getElementText();
String actionListenerItem;
try {
actionListenerItem = Adapters.collapsedStringAdapterAdapter.unmarshal(actionListenerItemRaw);
} catch (Exception e) {
context.xmlAdapterError(elementReader, CollapsedStringAdapter.class, String.class, String.class, e);
continue;
}
if (actionListener == null) {
actionListener = facesApplication.actionListener;
if (actionListener!= null) {
actionListener.clear();
} else {
actionListener = new ArrayList<>();
}
}
actionListener.add(actionListenerItem);
} else if (("default-render-kit-id" == elementReader.getLocalName())&&("http://java.sun.com/xml/ns/javaee" == elementReader.getNamespaceURI())) {
// ELEMENT: defaultRenderKitId
String defaultRenderKitIdItemRaw = elementReader.getElementText();
String defaultRenderKitIdItem;
try {
defaultRenderKitIdItem = Adapters.collapsedStringAdapterAdapter.unmarshal(defaultRenderKitIdItemRaw);
} catch (Exception e) {
context.xmlAdapterError(elementReader, CollapsedStringAdapter.class, String.class, String.class, e);
continue;
}
if (defaultRenderKitId == null) {
defaultRenderKitId = facesApplication.defaultRenderKitId;
if (defaultRenderKitId!= null) {
defaultRenderKitId.clear();
} else {
defaultRenderKitId = new ArrayList<>();
}
}
defaultRenderKitId.add(defaultRenderKitIdItem);
} else if (("message-bundle" == elementReader.getLocalName())&&("http://java.sun.com/xml/ns/javaee" == elementReader.getNamespaceURI())) {
// ELEMENT: messageBundle
String messageBundleItemRaw = elementReader.getElementText();
String messageBundleItem;
try {
messageBundleItem = Adapters.collapsedStringAdapterAdapter.unmarshal(messageBundleItemRaw);
} catch (Exception e) {
context.xmlAdapterError(elementReader, CollapsedStringAdapter.class, String.class, String.class, e);
continue;
}
if (messageBundle == null) {
messageBundle = facesApplication.messageBundle;
if (messageBundle!= null) {
messageBundle.clear();
} else {
messageBundle = new ArrayList<>();
}
}
messageBundle.add(messageBundleItem);
} else if (("navigation-handler" == elementReader.getLocalName())&&("http://java.sun.com/xml/ns/javaee" == elementReader.getNamespaceURI())) {
// ELEMENT: navigationHandler
String navigationHandlerItemRaw = elementReader.getElementText();
String navigationHandlerItem;
try {
navigationHandlerItem = Adapters.collapsedStringAdapterAdapter.unmarshal(navigationHandlerItemRaw);
} catch (Exception e) {
context.xmlAdapterError(elementReader, CollapsedStringAdapter.class, String.class, String.class, e);
continue;
}
if (navigationHandler == null) {
navigationHandler = facesApplication.navigationHandler;
if (navigationHandler!= null) {
navigationHandler.clear();
} else {
navigationHandler = new ArrayList<>();
}
}
navigationHandler.add(navigationHandlerItem);
} else if (("view-handler" == elementReader.getLocalName())&&("http://java.sun.com/xml/ns/javaee" == elementReader.getNamespaceURI())) {
// ELEMENT: viewHandler
String viewHandlerItemRaw = elementReader.getElementText();
String viewHandlerItem;
try {
viewHandlerItem = Adapters.collapsedStringAdapterAdapter.unmarshal(viewHandlerItemRaw);
} catch (Exception e) {
context.xmlAdapterError(elementReader, CollapsedStringAdapter.class, String.class, String.class, e);
continue;
}
if (viewHandler == null) {
viewHandler = facesApplication.viewHandler;
if (viewHandler!= null) {
viewHandler.clear();
} else {
viewHandler = new ArrayList<>();
}
}
viewHandler.add(viewHandlerItem);
} else if (("state-manager" == elementReader.getLocalName())&&("http://java.sun.com/xml/ns/javaee" == elementReader.getNamespaceURI())) {
// ELEMENT: stateManager
String stateManagerItemRaw = elementReader.getElementText();
String stateManagerItem;
try {
stateManagerItem = Adapters.collapsedStringAdapterAdapter.unmarshal(stateManagerItemRaw);
} catch (Exception e) {
context.xmlAdapterError(elementReader, CollapsedStringAdapter.class, String.class, String.class, e);
continue;
}
if (stateManager == null) {
stateManager = facesApplication.stateManager;
if (stateManager!= null) {
stateManager.clear();
} else {
stateManager = new ArrayList<>();
}
}
stateManager.add(stateManagerItem);
} else if (("el-resolver" == elementReader.getLocalName())&&("http://java.sun.com/xml/ns/javaee" == elementReader.getNamespaceURI())) {
// ELEMENT: elResolver
String elResolverItemRaw = elementReader.getElementText();
String elResolverItem;
try {
elResolverItem = Adapters.collapsedStringAdapterAdapter.unmarshal(elResolverItemRaw);
} catch (Exception e) {
context.xmlAdapterError(elementReader, CollapsedStringAdapter.class, String.class, String.class, e);
continue;
}
if (elResolver == null) {
elResolver = facesApplication.elResolver;
if (elResolver!= null) {
elResolver.clear();
} else {
elResolver = new ArrayList<>();
}
}
elResolver.add(elResolverItem);
} else if (("property-resolver" == elementReader.getLocalName())&&("http://java.sun.com/xml/ns/javaee" == elementReader.getNamespaceURI())) {
// ELEMENT: propertyResolver
String propertyResolverItemRaw = elementReader.getElementText();
String propertyResolverItem;
try {
propertyResolverItem = Adapters.collapsedStringAdapterAdapter.unmarshal(propertyResolverItemRaw);
} catch (Exception e) {
context.xmlAdapterError(elementReader, CollapsedStringAdapter.class, String.class, String.class, e);
continue;
}
if (propertyResolver == null) {
propertyResolver = facesApplication.propertyResolver;
if (propertyResolver!= null) {
propertyResolver.clear();
} else {
propertyResolver = new ArrayList<>();
}
}
propertyResolver.add(propertyResolverItem);
} else if (("variable-resolver" == elementReader.getLocalName())&&("http://java.sun.com/xml/ns/javaee" == elementReader.getNamespaceURI())) {
// ELEMENT: variableResolver
String variableResolverItemRaw = elementReader.getElementText();
String variableResolverItem;
try {
variableResolverItem = Adapters.collapsedStringAdapterAdapter.unmarshal(variableResolverItemRaw);
} catch (Exception e) {
context.xmlAdapterError(elementReader, CollapsedStringAdapter.class, String.class, String.class, e);
continue;
}
if (variableResolver == null) {
variableResolver = facesApplication.variableResolver;
if (variableResolver!= null) {
variableResolver.clear();
} else {
variableResolver = new ArrayList<>();
}
}
variableResolver.add(variableResolverItem);
} else if (("resource-handler" == elementReader.getLocalName())&&("http://java.sun.com/xml/ns/javaee" == elementReader.getNamespaceURI())) {
// ELEMENT: resourceHandler
String resourceHandlerItemRaw = elementReader.getElementText();
String resourceHandlerItem;
try {
resourceHandlerItem = Adapters.collapsedStringAdapterAdapter.unmarshal(resourceHandlerItemRaw);
} catch (Exception e) {
context.xmlAdapterError(elementReader, CollapsedStringAdapter.class, String.class, String.class, e);
continue;
}
if (resourceHandler == null) {
resourceHandler = facesApplication.resourceHandler;
if (resourceHandler!= null) {
resourceHandler.clear();
} else {
resourceHandler = new ArrayList<>();
}
}
resourceHandler.add(resourceHandlerItem);
} else if (("system-event-listener" == elementReader.getLocalName())&&("http://java.sun.com/xml/ns/javaee" == elementReader.getNamespaceURI())) {
// ELEMENT: systemEventListener
FacesSystemEventListener systemEventListenerItem = readFacesSystemEventListener(elementReader, context);
if (systemEventListener == null) {
systemEventListener = facesApplication.systemEventListener;
if (systemEventListener!= null) {
systemEventListener.clear();
} else {
systemEventListener = new ArrayList<>();
}
}
systemEventListener.add(systemEventListenerItem);
} else if (("locale-config" == elementReader.getLocalName())&&("http://java.sun.com/xml/ns/javaee" == elementReader.getNamespaceURI())) {
// ELEMENT: localeConfig
FacesLocaleConfig localeConfigItem = readFacesLocaleConfig(elementReader, context);
if (localeConfig == null) {
localeConfig = facesApplication.localeConfig;
if (localeConfig!= null) {
localeConfig.clear();
} else {
localeConfig = new ArrayList<>();
}
}
localeConfig.add(localeConfigItem);
} else if (("resource-bundle" == elementReader.getLocalName())&&("http://java.sun.com/xml/ns/javaee" == elementReader.getNamespaceURI())) {
// ELEMENT: resourceBundle
FacesApplicationResourceBundle resourceBundle = readFacesApplicationResourceBundle(elementReader, context);
facesApplication.resourceBundle = resourceBundle;
} else if (("application-extension" == elementReader.getLocalName())&&("http://java.sun.com/xml/ns/javaee" == elementReader.getNamespaceURI())) {
// ELEMENT: applicationExtension
FacesApplicationExtension applicationExtensionItem = readFacesApplicationExtension(elementReader, context);
if (applicationExtension == null) {
applicationExtension = facesApplication.applicationExtension;
if (applicationExtension!= null) {
applicationExtension.clear();
} else {
applicationExtension = new ArrayList<>();
}
}
applicationExtension.add(applicationExtensionItem);
} else if (("default-validators" == elementReader.getLocalName())&&("http://java.sun.com/xml/ns/javaee" == elementReader.getNamespaceURI())) {
// ELEMENT: defaultValidators
FacesValidator defaultValidatorsItem = readFacesValidator(elementReader, context);
if (defaultValidators == null) {
defaultValidators = facesApplication.defaultValidators;
if (defaultValidators!= null) {
defaultValidators.clear();
} else {
defaultValidators = new ArrayList<>();
}
}
defaultValidators.add(defaultValidatorsItem);
} else {
// ELEMENT_REF: others
if (others == null) {
others = facesApplication.others;
if (others!= null) {
others.clear();
} else {
others = new ArrayList<>();
}
}
others.add(context.readXmlAny(elementReader, Object.class, false));
}
}
if (actionListener!= null) {
facesApplication.actionListener = actionListener;
}
if (defaultRenderKitId!= null) {
facesApplication.defaultRenderKitId = defaultRenderKitId;
}
if (messageBundle!= null) {
facesApplication.messageBundle = messageBundle;
}
if (navigationHandler!= null) {
facesApplication.navigationHandler = navigationHandler;
}
if (viewHandler!= null) {
facesApplication.viewHandler = viewHandler;
}
if (stateManager!= null) {
facesApplication.stateManager = stateManager;
}
if (elResolver!= null) {
facesApplication.elResolver = elResolver;
}
if (propertyResolver!= null) {
facesApplication.propertyResolver = propertyResolver;
}
if (variableResolver!= null) {
facesApplication.variableResolver = variableResolver;
}
if (resourceHandler!= null) {
facesApplication.resourceHandler = resourceHandler;
}
if (systemEventListener!= null) {
facesApplication.systemEventListener = systemEventListener;
}
if (localeConfig!= null) {
facesApplication.localeConfig = localeConfig;
}
if (applicationExtension!= null) {
facesApplication.applicationExtension = applicationExtension;
}
if (defaultValidators!= null) {
facesApplication.defaultValidators = defaultValidators;
}
if (others!= null) {
facesApplication.others = others;
}
context.afterUnmarshal(facesApplication, LifecycleCallback.NONE);
return facesApplication;
}
    /**
     * Instance-level entry point required by the reader contract; simply
     * delegates to the static {@link #_read} unmarshaller with the same
     * arguments.
     */
    public final FacesApplication read(XoXMLStreamReader reader, RuntimeContext context)
        throws Exception
    {
        return _read(reader, context);
    }
    /**
     * Marshals a {@code FacesApplication} to XML on the given writer.
     * <p>
     * Writes an {@code xsi:nil} marker for a null input, refuses unexpected
     * subclasses via the context, then emits the {@code id} attribute followed
     * by each child element in schema order. Each simple (String) list field is
     * run through the collapsed-string XML adapter item by item; adapter
     * failures are reported to the context and the offending item is skipped
     * (only non-null marshalled values are written). Complex children delegate
     * to their own generated {@code writeFaces*} methods.
     * <p>
     * NOTE(review): generated code — keep structurally in sync with the
     * corresponding {@code _read} method and the FacesApplication schema.
     */
    public static final void _write(XoXMLStreamWriter writer, FacesApplication facesApplication, RuntimeContext context)
        throws Exception
    {
        if (facesApplication == null) {
            writer.writeXsiNil();
            return ;
        }
        if (context == null) {
            context = new RuntimeContext();
        }
        // Bind (or reuse) a namespace prefix for the JavaEE namespace used by every element below.
        String prefix = writer.getUniquePrefix("http://java.sun.com/xml/ns/javaee");
        if (FacesApplication.class!= facesApplication.getClass()) {
            context.unexpectedSubclass(writer, facesApplication, FacesApplication.class);
            return ;
        }
        context.beforeMarshal(facesApplication, LifecycleCallback.NONE);
        // ATTRIBUTE: id
        String idRaw = facesApplication.id;
        if (idRaw!= null) {
            String id = null;
            try {
                id = Adapters.collapsedStringAdapterAdapter.marshal(idRaw);
            } catch (Exception e) {
                context.xmlAdapterError(facesApplication, "id", CollapsedStringAdapter.class, String.class, String.class, e);
            }
            writer.writeAttribute("", "", "id", id);
        }
        // ELEMENT: actionListener
        List<String> actionListenerRaw = facesApplication.actionListener;
        if (actionListenerRaw!= null) {
            for (String actionListenerItem: actionListenerRaw) {
                String actionListener = null;
                try {
                    actionListener = Adapters.collapsedStringAdapterAdapter.marshal(actionListenerItem);
                } catch (Exception e) {
                    context.xmlAdapterError(facesApplication, "actionListener", CollapsedStringAdapter.class, List.class, List.class, e);
                }
                if (actionListener!= null) {
                    writer.writeStartElement(prefix, "action-listener", "http://java.sun.com/xml/ns/javaee");
                    writer.writeCharacters(actionListener);
                    writer.writeEndElement();
                }
            }
        }
        // ELEMENT: defaultRenderKitId
        List<String> defaultRenderKitIdRaw = facesApplication.defaultRenderKitId;
        if (defaultRenderKitIdRaw!= null) {
            for (String defaultRenderKitIdItem: defaultRenderKitIdRaw) {
                String defaultRenderKitId = null;
                try {
                    defaultRenderKitId = Adapters.collapsedStringAdapterAdapter.marshal(defaultRenderKitIdItem);
                } catch (Exception e) {
                    context.xmlAdapterError(facesApplication, "defaultRenderKitId", CollapsedStringAdapter.class, List.class, List.class, e);
                }
                if (defaultRenderKitId!= null) {
                    writer.writeStartElement(prefix, "default-render-kit-id", "http://java.sun.com/xml/ns/javaee");
                    writer.writeCharacters(defaultRenderKitId);
                    writer.writeEndElement();
                }
            }
        }
        // ELEMENT: messageBundle
        List<String> messageBundleRaw = facesApplication.messageBundle;
        if (messageBundleRaw!= null) {
            for (String messageBundleItem: messageBundleRaw) {
                String messageBundle = null;
                try {
                    messageBundle = Adapters.collapsedStringAdapterAdapter.marshal(messageBundleItem);
                } catch (Exception e) {
                    context.xmlAdapterError(facesApplication, "messageBundle", CollapsedStringAdapter.class, List.class, List.class, e);
                }
                if (messageBundle!= null) {
                    writer.writeStartElement(prefix, "message-bundle", "http://java.sun.com/xml/ns/javaee");
                    writer.writeCharacters(messageBundle);
                    writer.writeEndElement();
                }
            }
        }
        // ELEMENT: navigationHandler
        List<String> navigationHandlerRaw = facesApplication.navigationHandler;
        if (navigationHandlerRaw!= null) {
            for (String navigationHandlerItem: navigationHandlerRaw) {
                String navigationHandler = null;
                try {
                    navigationHandler = Adapters.collapsedStringAdapterAdapter.marshal(navigationHandlerItem);
                } catch (Exception e) {
                    context.xmlAdapterError(facesApplication, "navigationHandler", CollapsedStringAdapter.class, List.class, List.class, e);
                }
                if (navigationHandler!= null) {
                    writer.writeStartElement(prefix, "navigation-handler", "http://java.sun.com/xml/ns/javaee");
                    writer.writeCharacters(navigationHandler);
                    writer.writeEndElement();
                }
            }
        }
        // ELEMENT: viewHandler
        List<String> viewHandlerRaw = facesApplication.viewHandler;
        if (viewHandlerRaw!= null) {
            for (String viewHandlerItem: viewHandlerRaw) {
                String viewHandler = null;
                try {
                    viewHandler = Adapters.collapsedStringAdapterAdapter.marshal(viewHandlerItem);
                } catch (Exception e) {
                    context.xmlAdapterError(facesApplication, "viewHandler", CollapsedStringAdapter.class, List.class, List.class, e);
                }
                if (viewHandler!= null) {
                    writer.writeStartElement(prefix, "view-handler", "http://java.sun.com/xml/ns/javaee");
                    writer.writeCharacters(viewHandler);
                    writer.writeEndElement();
                }
            }
        }
        // ELEMENT: stateManager
        List<String> stateManagerRaw = facesApplication.stateManager;
        if (stateManagerRaw!= null) {
            for (String stateManagerItem: stateManagerRaw) {
                String stateManager = null;
                try {
                    stateManager = Adapters.collapsedStringAdapterAdapter.marshal(stateManagerItem);
                } catch (Exception e) {
                    context.xmlAdapterError(facesApplication, "stateManager", CollapsedStringAdapter.class, List.class, List.class, e);
                }
                if (stateManager!= null) {
                    writer.writeStartElement(prefix, "state-manager", "http://java.sun.com/xml/ns/javaee");
                    writer.writeCharacters(stateManager);
                    writer.writeEndElement();
                }
            }
        }
        // ELEMENT: elResolver
        List<String> elResolverRaw = facesApplication.elResolver;
        if (elResolverRaw!= null) {
            for (String elResolverItem: elResolverRaw) {
                String elResolver = null;
                try {
                    elResolver = Adapters.collapsedStringAdapterAdapter.marshal(elResolverItem);
                } catch (Exception e) {
                    context.xmlAdapterError(facesApplication, "elResolver", CollapsedStringAdapter.class, List.class, List.class, e);
                }
                if (elResolver!= null) {
                    writer.writeStartElement(prefix, "el-resolver", "http://java.sun.com/xml/ns/javaee");
                    writer.writeCharacters(elResolver);
                    writer.writeEndElement();
                }
            }
        }
        // ELEMENT: propertyResolver
        List<String> propertyResolverRaw = facesApplication.propertyResolver;
        if (propertyResolverRaw!= null) {
            for (String propertyResolverItem: propertyResolverRaw) {
                String propertyResolver = null;
                try {
                    propertyResolver = Adapters.collapsedStringAdapterAdapter.marshal(propertyResolverItem);
                } catch (Exception e) {
                    context.xmlAdapterError(facesApplication, "propertyResolver", CollapsedStringAdapter.class, List.class, List.class, e);
                }
                if (propertyResolver!= null) {
                    writer.writeStartElement(prefix, "property-resolver", "http://java.sun.com/xml/ns/javaee");
                    writer.writeCharacters(propertyResolver);
                    writer.writeEndElement();
                }
            }
        }
        // ELEMENT: variableResolver
        List<String> variableResolverRaw = facesApplication.variableResolver;
        if (variableResolverRaw!= null) {
            for (String variableResolverItem: variableResolverRaw) {
                String variableResolver = null;
                try {
                    variableResolver = Adapters.collapsedStringAdapterAdapter.marshal(variableResolverItem);
                } catch (Exception e) {
                    context.xmlAdapterError(facesApplication, "variableResolver", CollapsedStringAdapter.class, List.class, List.class, e);
                }
                if (variableResolver!= null) {
                    writer.writeStartElement(prefix, "variable-resolver", "http://java.sun.com/xml/ns/javaee");
                    writer.writeCharacters(variableResolver);
                    writer.writeEndElement();
                }
            }
        }
        // ELEMENT: resourceHandler
        List<String> resourceHandlerRaw = facesApplication.resourceHandler;
        if (resourceHandlerRaw!= null) {
            for (String resourceHandlerItem: resourceHandlerRaw) {
                String resourceHandler = null;
                try {
                    resourceHandler = Adapters.collapsedStringAdapterAdapter.marshal(resourceHandlerItem);
                } catch (Exception e) {
                    context.xmlAdapterError(facesApplication, "resourceHandler", CollapsedStringAdapter.class, List.class, List.class, e);
                }
                if (resourceHandler!= null) {
                    writer.writeStartElement(prefix, "resource-handler", "http://java.sun.com/xml/ns/javaee");
                    writer.writeCharacters(resourceHandler);
                    writer.writeEndElement();
                }
            }
        }
        // ELEMENT: systemEventListener (complex type, delegates to its own writer)
        List<FacesSystemEventListener> systemEventListener = facesApplication.systemEventListener;
        if (systemEventListener!= null) {
            for (FacesSystemEventListener systemEventListenerItem: systemEventListener) {
                if (systemEventListenerItem!= null) {
                    writer.writeStartElement(prefix, "system-event-listener", "http://java.sun.com/xml/ns/javaee");
                    writeFacesSystemEventListener(writer, systemEventListenerItem, context);
                    writer.writeEndElement();
                }
            }
        }
        // ELEMENT: localeConfig
        List<FacesLocaleConfig> localeConfig = facesApplication.localeConfig;
        if (localeConfig!= null) {
            for (FacesLocaleConfig localeConfigItem: localeConfig) {
                if (localeConfigItem!= null) {
                    writer.writeStartElement(prefix, "locale-config", "http://java.sun.com/xml/ns/javaee");
                    writeFacesLocaleConfig(writer, localeConfigItem, context);
                    writer.writeEndElement();
                }
            }
        }
        // ELEMENT: resourceBundle — singular child; absence is reported to the context.
        FacesApplicationResourceBundle resourceBundle = facesApplication.resourceBundle;
        if (resourceBundle!= null) {
            writer.writeStartElement(prefix, "resource-bundle", "http://java.sun.com/xml/ns/javaee");
            writeFacesApplicationResourceBundle(writer, resourceBundle, context);
            writer.writeEndElement();
        } else {
            context.unexpectedNullValue(facesApplication, "resourceBundle");
        }
        // ELEMENT: applicationExtension
        List<FacesApplicationExtension> applicationExtension = facesApplication.applicationExtension;
        if (applicationExtension!= null) {
            for (FacesApplicationExtension applicationExtensionItem: applicationExtension) {
                if (applicationExtensionItem!= null) {
                    writer.writeStartElement(prefix, "application-extension", "http://java.sun.com/xml/ns/javaee");
                    writeFacesApplicationExtension(writer, applicationExtensionItem, context);
                    writer.writeEndElement();
                }
            }
        }
        // ELEMENT: defaultValidators
        List<FacesValidator> defaultValidators = facesApplication.defaultValidators;
        if (defaultValidators!= null) {
            for (FacesValidator defaultValidatorsItem: defaultValidators) {
                if (defaultValidatorsItem!= null) {
                    writer.writeStartElement(prefix, "default-validators", "http://java.sun.com/xml/ns/javaee");
                    writeFacesValidator(writer, defaultValidatorsItem, context);
                    writer.writeEndElement();
                }
            }
        }
        // ELEMENT_REF: others — wildcard content handed off to the runtime context.
        List<Object> others = facesApplication.others;
        if (others!= null) {
            for (Object othersItem: others) {
                context.writeXmlAny(writer, facesApplication, "others", othersItem);
            }
        }
        context.afterMarshal(facesApplication, LifecycleCallback.NONE);
    }
}
|
googleapis/google-cloud-java | 34,938 | java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/schema/predict/prediction/TabularClassificationPredictionResult.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1beta1/schema/predict/prediction/tabular_classification.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1beta1.schema.predict.prediction;
/**
*
*
* <pre>
* Prediction output format for Tabular Classification.
* </pre>
*
* Protobuf type {@code
* google.cloud.aiplatform.v1beta1.schema.predict.prediction.TabularClassificationPredictionResult}
*/
public final class TabularClassificationPredictionResult
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.schema.predict.prediction.TabularClassificationPredictionResult)
TabularClassificationPredictionResultOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use TabularClassificationPredictionResult.newBuilder() to construct.
  private TabularClassificationPredictionResult(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor used for the default instance: repeated fields start empty.
  private TabularClassificationPredictionResult() {
    classes_ = com.google.protobuf.LazyStringArrayList.emptyList();
    scores_ = emptyFloatList();
  }
  // Runtime hook used by the protobuf framework to create fresh instances reflectively.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new TabularClassificationPredictionResult();
  }
  // Static descriptor for this message type, taken from the generated proto file class.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
        .TabularClassificationPredictionResultProto
        .internal_static_google_cloud_aiplatform_v1beta1_schema_predict_prediction_TabularClassificationPredictionResult_descriptor;
  }

  // Reflection table binding field descriptors to the generated message and builder classes.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
        .TabularClassificationPredictionResultProto
        .internal_static_google_cloud_aiplatform_v1beta1_schema_predict_prediction_TabularClassificationPredictionResult_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
                .TabularClassificationPredictionResult.class,
            com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
                .TabularClassificationPredictionResult.Builder.class);
  }
  public static final int CLASSES_FIELD_NUMBER = 1;

  // Backing storage for "classes". buildPartial0 assigns the builder's list here
  // after makeImmutable(), so instances never expose a modifiable list.
  @SuppressWarnings("serial")
  private com.google.protobuf.LazyStringArrayList classes_ =
      com.google.protobuf.LazyStringArrayList.emptyList();

  /**
   *
   *
   * <pre>
   * The name of the classes being classified, contains all possible values of
   * the target column.
   * </pre>
   *
   * <code>repeated string classes = 1;</code>
   *
   * @return A list containing the classes.
   */
  public com.google.protobuf.ProtocolStringList getClassesList() {
    return classes_;
  }

  /**
   *
   *
   * <pre>
   * The name of the classes being classified, contains all possible values of
   * the target column.
   * </pre>
   *
   * <code>repeated string classes = 1;</code>
   *
   * @return The count of classes.
   */
  public int getClassesCount() {
    return classes_.size();
  }

  /**
   *
   *
   * <pre>
   * The name of the classes being classified, contains all possible values of
   * the target column.
   * </pre>
   *
   * <code>repeated string classes = 1;</code>
   *
   * @param index The index of the element to return.
   * @return The classes at the given index.
   */
  public java.lang.String getClasses(int index) {
    return classes_.get(index);
  }

  /**
   *
   *
   * <pre>
   * The name of the classes being classified, contains all possible values of
   * the target column.
   * </pre>
   *
   * <code>repeated string classes = 1;</code>
   *
   * @param index The index of the value to return.
   * @return The bytes of the classes at the given index.
   */
  public com.google.protobuf.ByteString getClassesBytes(int index) {
    return classes_.getByteString(index);
  }
  public static final int SCORES_FIELD_NUMBER = 2;

  // Backing storage for "scores"; primitive FloatList avoids boxing.
  @SuppressWarnings("serial")
  private com.google.protobuf.Internal.FloatList scores_ = emptyFloatList();

  /**
   *
   *
   * <pre>
   * The model's confidence in each class being correct, higher
   * value means higher confidence. The N-th score corresponds to
   * the N-th class in classes.
   * </pre>
   *
   * <code>repeated float scores = 2;</code>
   *
   * @return A list containing the scores.
   */
  @java.lang.Override
  public java.util.List<java.lang.Float> getScoresList() {
    return scores_;
  }

  /**
   *
   *
   * <pre>
   * The model's confidence in each class being correct, higher
   * value means higher confidence. The N-th score corresponds to
   * the N-th class in classes.
   * </pre>
   *
   * <code>repeated float scores = 2;</code>
   *
   * @return The count of scores.
   */
  public int getScoresCount() {
    return scores_.size();
  }

  /**
   *
   *
   * <pre>
   * The model's confidence in each class being correct, higher
   * value means higher confidence. The N-th score corresponds to
   * the N-th class in classes.
   * </pre>
   *
   * <code>repeated float scores = 2;</code>
   *
   * @param index The index of the element to return.
   * @return The scores at the given index.
   */
  public float getScores(int index) {
    return scores_.getFloat(index);
  }
  // Byte size of the packed "scores" payload, cached by getSerializedSize()
  // and consumed by writeTo() when emitting the length-delimited field.
  private int scoresMemoizedSerializedSize = -1;
  // Tri-state cache: 1 = initialized, 0 = not initialized, -1 = not yet computed.
  private byte memoizedIsInitialized = -1;

  // This message has no required fields, so it is always initialized.
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Populates scoresMemoizedSerializedSize as a side effect; must run before
    // the packed length below is written.
    getSerializedSize();
    for (int i = 0; i < classes_.size(); i++) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, classes_.getRaw(i));
    }
    if (getScoresList().size() > 0) {
      // Tag 18 = field 2, wire type 2: scores are written packed (length-delimited).
      output.writeUInt32NoTag(18);
      output.writeUInt32NoTag(scoresMemoizedSerializedSize);
    }
    for (int i = 0; i < scores_.size(); i++) {
      output.writeFloatNoTag(scores_.getFloat(i));
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Memoized: -1 means "not computed yet".
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    {
      // classes: per-element payload size plus one tag byte per element.
      int dataSize = 0;
      for (int i = 0; i < classes_.size(); i++) {
        dataSize += computeStringSizeNoTag(classes_.getRaw(i));
      }
      size += dataSize;
      size += 1 * getClassesList().size();
    }
    {
      // scores: packed encoding — 4 bytes per float, one tag byte, plus the
      // varint length prefix. The payload size is cached for writeTo().
      int dataSize = 0;
      dataSize = 4 * getScoresList().size();
      size += dataSize;
      if (!getScoresList().isEmpty()) {
        size += 1;
        size += com.google.protobuf.CodedOutputStream.computeInt32SizeNoTag(dataSize);
      }
      scoresMemoizedSerializedSize = dataSize;
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over classes, scores and unknown fields; non-message
  // arguments fall back to super.equals (reference equality).
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj
        instanceof
        com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
            .TabularClassificationPredictionResult)) {
      return super.equals(obj);
    }
    com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
            .TabularClassificationPredictionResult
        other =
            (com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
                    .TabularClassificationPredictionResult)
                obj;
    if (!getClassesList().equals(other.getClassesList())) return false;
    if (!getScoresList().equals(other.getScoresList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash over descriptor, populated fields and unknown fields, memoized after
  // the first call (0 means "not computed yet"). Consistent with equals().
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getClassesCount() > 0) {
      hash = (37 * hash) + CLASSES_FIELD_NUMBER;
      hash = (53 * hash) + getClassesList().hashCode();
    }
    if (getScoresCount() > 0) {
      hash = (37 * hash) + SCORES_FIELD_NUMBER;
      hash = (53 * hash) + getScoresList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // ---------------------------------------------------------------------------
  // Standard generated parseFrom / parseDelimitedFrom overloads. All delegate
  // to PARSER (byte-oriented inputs) or to GeneratedMessageV3 stream helpers
  // (stream-oriented inputs), with and without an extension registry.
  // ---------------------------------------------------------------------------
  public static com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
          .TabularClassificationPredictionResult
      parseFrom(java.nio.ByteBuffer data)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
          .TabularClassificationPredictionResult
      parseFrom(
          java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
          .TabularClassificationPredictionResult
      parseFrom(com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
          .TabularClassificationPredictionResult
      parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
          .TabularClassificationPredictionResult
      parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
          .TabularClassificationPredictionResult
      parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
          .TabularClassificationPredictionResult
      parseFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
          .TabularClassificationPredictionResult
      parseFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
          .TabularClassificationPredictionResult
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
          .TabularClassificationPredictionResult
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
          .TabularClassificationPredictionResult
      parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
          .TabularClassificationPredictionResult
      parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods: fresh builder, builder seeded from a prototype,
  // and conversion of this instance back into a builder.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
              .TabularClassificationPredictionResult
          prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  // The default instance yields an empty builder; anything else is merged in.
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Prediction output format for Tabular Classification.
* </pre>
*
* Protobuf type {@code
* google.cloud.aiplatform.v1beta1.schema.predict.prediction.TabularClassificationPredictionResult}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.schema.predict.prediction.TabularClassificationPredictionResult)
com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
.TabularClassificationPredictionResultOrBuilder {
    // Same descriptor and reflection table as the message class.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
          .TabularClassificationPredictionResultProto
          .internal_static_google_cloud_aiplatform_v1beta1_schema_predict_prediction_TabularClassificationPredictionResult_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
          .TabularClassificationPredictionResultProto
          .internal_static_google_cloud_aiplatform_v1beta1_schema_predict_prediction_TabularClassificationPredictionResult_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
                  .TabularClassificationPredictionResult.class,
              com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
                  .TabularClassificationPredictionResult.Builder.class);
    }
    // Construct using
    // com.google.cloud.aiplatform.v1beta1.schema.predict.prediction.TabularClassificationPredictionResult.newBuilder()
    private Builder() {}

    // Parent-aware constructor used by the framework for nested-builder change notification.
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets the builder to its pristine state: no has-bits set, empty repeated fields.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      classes_ = com.google.protobuf.LazyStringArrayList.emptyList();
      scores_ = emptyFloatList();
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
          .TabularClassificationPredictionResultProto
          .internal_static_google_cloud_aiplatform_v1beta1_schema_predict_prediction_TabularClassificationPredictionResult_descriptor;
    }

    // Shared immutable default instance for this message type.
    @java.lang.Override
    public com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
            .TabularClassificationPredictionResult
        getDefaultInstanceForType() {
      return com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
          .TabularClassificationPredictionResult.getDefaultInstance();
    }
    // Builds the message, throwing if required fields are missing
    // (this type has none, so buildPartial's result always passes).
    @java.lang.Override
    public com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
            .TabularClassificationPredictionResult
        build() {
      com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
              .TabularClassificationPredictionResult
          result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    // Builds without the initialization check; copies fields only when any
    // has-bit is set, then notifies the framework via onBuilt().
    @java.lang.Override
    public com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
            .TabularClassificationPredictionResult
        buildPartial() {
      com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
              .TabularClassificationPredictionResult
          result =
              new com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
                  .TabularClassificationPredictionResult(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Transfers set fields into the result. Bit 0x1 = classes, bit 0x2 = scores;
    // each list is frozen with makeImmutable() before being shared with the message.
    private void buildPartial0(
        com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
                .TabularClassificationPredictionResult
            result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        classes_.makeImmutable();
        result.classes_ = classes_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        scores_.makeImmutable();
        result.scores_ = scores_;
      }
    }
    // -----------------------------------------------------------------------
    // Reflection-based mutators. These exist so generated builders expose the
    // descriptor-driven API; every one simply delegates to the superclass.
    // -----------------------------------------------------------------------
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Dispatches to the typed merge when possible; otherwise falls back to the
    // generic descriptor-based merge in the superclass.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other
          instanceof
          com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
              .TabularClassificationPredictionResult) {
        return mergeFrom(
            (com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
                    .TabularClassificationPredictionResult)
                other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Typed merge: the default instance is a no-op. For each repeated field,
    // if this builder's list is still empty the other message's (immutable)
    // list is adopted directly; otherwise a mutable copy is ensured and the
    // other list's elements are appended.
    public Builder mergeFrom(
        com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
                .TabularClassificationPredictionResult
            other) {
      if (other
          == com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
              .TabularClassificationPredictionResult.getDefaultInstance()) return this;
      if (!other.classes_.isEmpty()) {
        if (classes_.isEmpty()) {
          classes_ = other.classes_;
          bitField0_ |= 0x00000001;
        } else {
          ensureClassesIsMutable();
          classes_.addAll(other.classes_);
        }
        onChanged();
      }
      if (!other.scores_.isEmpty()) {
        if (scores_.isEmpty()) {
          scores_ = other.scores_;
          // Keep the adopted list frozen so a later mutation forces a copy.
          scores_.makeImmutable();
          bitField0_ |= 0x00000002;
        } else {
          ensureScoresIsMutable();
          scores_.addAll(other.scores_);
        }
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    // No required fields, so a builder of this type is always initialized.
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
java.lang.String s = input.readStringRequireUtf8();
ensureClassesIsMutable();
classes_.add(s);
break;
} // case 10
case 21:
{
float v = input.readFloat();
ensureScoresIsMutable();
scores_.addFloat(v);
break;
} // case 21
case 18:
{
int length = input.readRawVarint32();
int limit = input.pushLimit(length);
int alloc = length > 4096 ? 4096 : length;
ensureScoresIsMutable(alloc / 4);
while (input.getBytesUntilLimit() > 0) {
scores_.addFloat(input.readFloat());
}
input.popLimit(limit);
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
    // Presence bits for this builder: 0x00000001 = classes, 0x00000002 = scores.
    private int bitField0_;
    private com.google.protobuf.LazyStringArrayList classes_ =
        com.google.protobuf.LazyStringArrayList.emptyList();
    // Copy-on-write guard: replace a shared/immutable list with a private
    // mutable copy before the first mutation, and mark the field as set.
    private void ensureClassesIsMutable() {
      if (!classes_.isModifiable()) {
        classes_ = new com.google.protobuf.LazyStringArrayList(classes_);
      }
      bitField0_ |= 0x00000001;
    }
    /**
     *
     *
     * <pre>
     * The name of the classes being classified, contains all possible values of
     * the target column.
     * </pre>
     *
     * <code>repeated string classes = 1;</code>
     *
     * @return A list containing the classes.
     */
    public com.google.protobuf.ProtocolStringList getClassesList() {
      // Freeze before exposing so callers cannot mutate builder state.
      classes_.makeImmutable();
      return classes_;
    }
    /**
     *
     *
     * <pre>
     * The name of the classes being classified, contains all possible values of
     * the target column.
     * </pre>
     *
     * <code>repeated string classes = 1;</code>
     *
     * @return The count of classes.
     */
    public int getClassesCount() {
      return classes_.size();
    }
    /**
     *
     *
     * <pre>
     * The name of the classes being classified, contains all possible values of
     * the target column.
     * </pre>
     *
     * <code>repeated string classes = 1;</code>
     *
     * @param index The index of the element to return.
     * @return The classes at the given index.
     */
    public java.lang.String getClasses(int index) {
      return classes_.get(index);
    }
    /**
     *
     *
     * <pre>
     * The name of the classes being classified, contains all possible values of
     * the target column.
     * </pre>
     *
     * <code>repeated string classes = 1;</code>
     *
     * @param index The index of the value to return.
     * @return The bytes of the classes at the given index.
     */
    public com.google.protobuf.ByteString getClassesBytes(int index) {
      return classes_.getByteString(index);
    }
    /**
     *
     *
     * <pre>
     * The name of the classes being classified, contains all possible values of
     * the target column.
     * </pre>
     *
     * <code>repeated string classes = 1;</code>
     *
     * @param index The index to set the value at.
     * @param value The classes to set.
     * @return This builder for chaining.
     */
    public Builder setClasses(int index, java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureClassesIsMutable();
      classes_.set(index, value);
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The name of the classes being classified, contains all possible values of
     * the target column.
     * </pre>
     *
     * <code>repeated string classes = 1;</code>
     *
     * @param value The classes to add.
     * @return This builder for chaining.
     */
    public Builder addClasses(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureClassesIsMutable();
      classes_.add(value);
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The name of the classes being classified, contains all possible values of
     * the target column.
     * </pre>
     *
     * <code>repeated string classes = 1;</code>
     *
     * @param values The classes to add.
     * @return This builder for chaining.
     */
    public Builder addAllClasses(java.lang.Iterable<java.lang.String> values) {
      ensureClassesIsMutable();
      com.google.protobuf.AbstractMessageLite.Builder.addAll(values, classes_);
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The name of the classes being classified, contains all possible values of
     * the target column.
     * </pre>
     *
     * <code>repeated string classes = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearClasses() {
      classes_ = com.google.protobuf.LazyStringArrayList.emptyList();
      bitField0_ = (bitField0_ & ~0x00000001);
      ;
      // (the stray ';' above is emitted by the protobuf code generator)
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The name of the classes being classified, contains all possible values of
     * the target column.
     * </pre>
     *
     * <code>repeated string classes = 1;</code>
     *
     * @param value The bytes of the classes to add.
     * @return This builder for chaining.
     */
    public Builder addClassesBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Bytes variant must still be valid UTF-8 (proto3 string field).
      checkByteStringIsUtf8(value);
      ensureClassesIsMutable();
      classes_.add(value);
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private com.google.protobuf.Internal.FloatList scores_ = emptyFloatList();
    // Copy-on-write guard for the primitive float list (mirrors
    // ensureClassesIsMutable for the classes field).
    private void ensureScoresIsMutable() {
      if (!scores_.isModifiable()) {
        scores_ = makeMutableCopy(scores_);
      }
      bitField0_ |= 0x00000002;
    }
    // Overload used by the packed-field parser: pre-sizes the mutable copy so
    // repeated addFloat calls while reading a packed run avoid re-growing.
    private void ensureScoresIsMutable(int capacity) {
      if (!scores_.isModifiable()) {
        scores_ = makeMutableCopy(scores_, capacity);
      }
      bitField0_ |= 0x00000002;
    }
    /**
     *
     *
     * <pre>
     * The model's confidence in each class being correct, higher
     * value means higher confidence. The N-th score corresponds to
     * the N-th class in classes.
     * </pre>
     *
     * <code>repeated float scores = 2;</code>
     *
     * @return A list containing the scores.
     */
    public java.util.List<java.lang.Float> getScoresList() {
      // Freeze before exposing so callers cannot mutate builder state.
      scores_.makeImmutable();
      return scores_;
    }
    /**
     *
     *
     * <pre>
     * The model's confidence in each class being correct, higher
     * value means higher confidence. The N-th score corresponds to
     * the N-th class in classes.
     * </pre>
     *
     * <code>repeated float scores = 2;</code>
     *
     * @return The count of scores.
     */
    public int getScoresCount() {
      return scores_.size();
    }
    /**
     *
     *
     * <pre>
     * The model's confidence in each class being correct, higher
     * value means higher confidence. The N-th score corresponds to
     * the N-th class in classes.
     * </pre>
     *
     * <code>repeated float scores = 2;</code>
     *
     * @param index The index of the element to return.
     * @return The scores at the given index.
     */
    public float getScores(int index) {
      return scores_.getFloat(index);
    }
    /**
     *
     *
     * <pre>
     * The model's confidence in each class being correct, higher
     * value means higher confidence. The N-th score corresponds to
     * the N-th class in classes.
     * </pre>
     *
     * <code>repeated float scores = 2;</code>
     *
     * @param index The index to set the value at.
     * @param value The scores to set.
     * @return This builder for chaining.
     */
    public Builder setScores(int index, float value) {
      ensureScoresIsMutable();
      scores_.setFloat(index, value);
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The model's confidence in each class being correct, higher
     * value means higher confidence. The N-th score corresponds to
     * the N-th class in classes.
     * </pre>
     *
     * <code>repeated float scores = 2;</code>
     *
     * @param value The scores to add.
     * @return This builder for chaining.
     */
    public Builder addScores(float value) {
      ensureScoresIsMutable();
      scores_.addFloat(value);
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The model's confidence in each class being correct, higher
     * value means higher confidence. The N-th score corresponds to
     * the N-th class in classes.
     * </pre>
     *
     * <code>repeated float scores = 2;</code>
     *
     * @param values The scores to add.
     * @return This builder for chaining.
     */
    public Builder addAllScores(java.lang.Iterable<? extends java.lang.Float> values) {
      ensureScoresIsMutable();
      com.google.protobuf.AbstractMessageLite.Builder.addAll(values, scores_);
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The model's confidence in each class being correct, higher
     * value means higher confidence. The N-th score corresponds to
     * the N-th class in classes.
     * </pre>
     *
     * <code>repeated float scores = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearScores() {
      scores_ = emptyFloatList();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    // Unknown-field handling delegates entirely to the generated superclass.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.schema.predict.prediction.TabularClassificationPredictionResult)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.schema.predict.prediction.TabularClassificationPredictionResult)
  // Singleton default instance, created eagerly in the static initializer.
  private static final com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
          .TabularClassificationPredictionResult
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE =
        new com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
            .TabularClassificationPredictionResult();
  }
  public static com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
          .TabularClassificationPredictionResult
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser that routes all wire parsing through the Builder's mergeFrom loop,
  // attaching the partially-built message to any parse failure.
  private static final com.google.protobuf.Parser<TabularClassificationPredictionResult> PARSER =
      new com.google.protobuf.AbstractParser<TabularClassificationPredictionResult>() {
        @java.lang.Override
        public TabularClassificationPredictionResult parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<TabularClassificationPredictionResult> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<TabularClassificationPredictionResult> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.aiplatform.v1beta1.schema.predict.prediction
          .TabularClassificationPredictionResult
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.iceberg.io;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.assertj.core.api.Assumptions.assumeThat;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import org.apache.iceberg.DataFile;
import org.apache.iceberg.DeleteFile;
import org.apache.iceberg.FileFormat;
import org.apache.iceberg.Parameter;
import org.apache.iceberg.ParameterizedTestExtension;
import org.apache.iceberg.Parameters;
import org.apache.iceberg.PartitionSpec;
import org.apache.iceberg.RowDelta;
import org.apache.iceberg.Schema;
import org.apache.iceberg.Table;
import org.apache.iceberg.data.BaseDeleteLoader;
import org.apache.iceberg.data.DeleteLoader;
import org.apache.iceberg.deletes.DeleteGranularity;
import org.apache.iceberg.deletes.PositionDelete;
import org.apache.iceberg.deletes.PositionDeleteIndex;
import org.apache.iceberg.expressions.Expressions;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableList;
import org.apache.iceberg.relocated.com.google.common.collect.Maps;
import org.apache.iceberg.util.ContentFileUtil;
import org.apache.iceberg.util.StructLikeSet;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.TestTemplate;
import org.junit.jupiter.api.extension.ExtendWith;
@ExtendWith(ParameterizedTestExtension.class)
public abstract class TestPartitioningWriters<T> extends WriterTestBase<T> {
  @Parameters(name = "formatVersion = {0}, fileFormat = {1}")
  protected static List<Object> parameters() {
    // Run every test template against format version 2 with each supported file format.
    return Arrays.asList(
        new Object[] {2, FileFormat.AVRO},
        new Object[] {2, FileFormat.PARQUET},
        new Object[] {2, FileFormat.ORC});
  }
  // Large enough that no test ever rolls over to a new file for size reasons.
  private static final long TARGET_FILE_SIZE = 128L * 1024 * 1024;
  @Parameter(index = 1)
  private FileFormat fileFormat;
  private OutputFileFactory fileFactory = null;
  // Converts expected rows into a StructLikeSet for order-insensitive comparison.
  protected abstract StructLikeSet toSet(Iterable<T> records);
  protected FileFormat format() {
    return fileFormat;
  }
  @Override
  @BeforeEach
  public void setupTable() throws Exception {
    // Each test starts from a fresh unpartitioned table; tests evolve the spec as needed.
    this.metadataDir = new File(tableDir, "metadata");
    this.table = create(SCHEMA, PartitionSpec.unpartitioned());
    this.fileFactory = OutputFileFactory.builderFor(table, 1, 1).format(fileFormat).build();
  }
  @TestTemplate
  public void testClusteredDataWriterNoRecords() throws IOException {
    // Closing a writer that never received rows must yield an empty result,
    // and close() must be idempotent.
    FileWriterFactory<T> writerFactory = newWriterFactory(table.schema());
    ClusteredDataWriter<T> writer =
        new ClusteredDataWriter<>(writerFactory, fileFactory, table.io(), TARGET_FILE_SIZE);
    writer.close();
    assertThat(writer.result().dataFiles()).isEmpty();
    writer.close();
    assertThat(writer.result().dataFiles()).isEmpty();
  }
  @TestTemplate
  public void testClusteredDataWriterMultiplePartitions() throws IOException {
    // Rows arriving in partition-sorted order must produce one data file per
    // partition and commit cleanly.
    table.updateSpec().addField(Expressions.ref("data")).commit();
    FileWriterFactory<T> writerFactory = newWriterFactory(table.schema());
    ClusteredDataWriter<T> writer =
        new ClusteredDataWriter<>(writerFactory, fileFactory, table.io(), TARGET_FILE_SIZE);
    PartitionSpec spec = table.spec();
    writer.write(toRow(1, "aaa"), spec, partitionKey(spec, "aaa"));
    writer.write(toRow(2, "aaa"), spec, partitionKey(spec, "aaa"));
    writer.write(toRow(3, "bbb"), spec, partitionKey(spec, "bbb"));
    writer.write(toRow(4, "bbb"), spec, partitionKey(spec, "bbb"));
    writer.write(toRow(5, "ccc"), spec, partitionKey(spec, "ccc"));
    writer.close();
    DataWriteResult result = writer.result();
    assertThat(result.dataFiles()).hasSize(3);
    RowDelta rowDelta = table.newRowDelta();
    result.dataFiles().forEach(rowDelta::addRows);
    rowDelta.commit();
    List<T> expectedRows =
        ImmutableList.of(
            toRow(1, "aaa"), toRow(2, "aaa"), toRow(3, "bbb"), toRow(4, "bbb"), toRow(5, "ccc"));
    assertThat(actualRowSet("*")).isEqualTo(toSet(expectedRows));
  }
  @TestTemplate
  public void testClusteredDataWriterOutOfOrderPartitions() throws IOException {
    // A clustered writer closes the file for a partition once a new partition
    // starts; revisiting a closed partition must fail fast.
    table.updateSpec().addField(Expressions.ref("data")).commit();
    FileWriterFactory<T> writerFactory = newWriterFactory(table.schema());
    ClusteredDataWriter<T> writer =
        new ClusteredDataWriter<>(writerFactory, fileFactory, table.io(), TARGET_FILE_SIZE);
    PartitionSpec spec = table.spec();
    writer.write(toRow(1, "aaa"), spec, partitionKey(spec, "aaa"));
    writer.write(toRow(2, "aaa"), spec, partitionKey(spec, "aaa"));
    writer.write(toRow(3, "bbb"), spec, partitionKey(spec, "bbb"));
    writer.write(toRow(4, "bbb"), spec, partitionKey(spec, "bbb"));
    writer.write(toRow(5, "ccc"), spec, partitionKey(spec, "ccc"));
    // "aaa" was already closed when "bbb" began, so writing it again is an error.
    assertThatThrownBy(() -> writer.write(toRow(6, "aaa"), spec, partitionKey(spec, "aaa")))
        .isInstanceOf(IllegalStateException.class)
        .hasMessageContaining("Encountered records that belong to already closed files")
        .hasMessageEndingWith("partition 'data=aaa' in spec " + spec);
    writer.close();
  }
  @TestTemplate
  public void testClusteredEqualityDeleteWriterNoRecords() throws IOException {
    // An unused equality-delete writer must report an empty, data-file-free
    // result, and close() must be idempotent.
    List<Integer> equalityFieldIds = ImmutableList.of(table.schema().findField("id").fieldId());
    Schema equalityDeleteRowSchema = table.schema().select("id");
    FileWriterFactory<T> writerFactory =
        newWriterFactory(table.schema(), equalityFieldIds, equalityDeleteRowSchema);
    ClusteredEqualityDeleteWriter<T> writer =
        new ClusteredEqualityDeleteWriter<>(
            writerFactory, fileFactory, table.io(), TARGET_FILE_SIZE);
    writer.close();
    assertThat(writer.result().deleteFiles()).isEmpty();
    assertThat(writer.result().referencedDataFiles()).isEmpty();
    assertThat(writer.result().referencesDataFiles()).isFalse();
    writer.close();
    assertThat(writer.result().deleteFiles()).isEmpty();
    assertThat(writer.result().referencedDataFiles()).isEmpty();
    assertThat(writer.result().referencesDataFiles()).isFalse();
  }
@TestTemplate
public void testClusteredEqualityDeleteWriterMultipleSpecs() throws IOException {
List<Integer> equalityFieldIds = ImmutableList.of(table.schema().findField("id").fieldId());
Schema equalityDeleteRowSchema = table.schema().select("id");
FileWriterFactory<T> writerFactory =
newWriterFactory(table.schema(), equalityFieldIds, equalityDeleteRowSchema);
// add an unpartitioned data file
ImmutableList<T> rows1 = ImmutableList.of(toRow(1, "aaa"), toRow(2, "aaa"), toRow(11, "aaa"));
DataFile dataFile1 = writeData(writerFactory, fileFactory, rows1, table.spec(), null);
table.newFastAppend().appendFile(dataFile1).commit();
// partition by bucket
table.updateSpec().addField(Expressions.bucket("data", 16)).commit();
// add a data file partitioned by bucket
ImmutableList<T> rows2 = ImmutableList.of(toRow(3, "bbb"), toRow(4, "bbb"), toRow(12, "bbb"));
DataFile dataFile2 =
writeData(
writerFactory, fileFactory, rows2, table.spec(), partitionKey(table.spec(), "bbb"));
table.newFastAppend().appendFile(dataFile2).commit();
// partition by data
table
.updateSpec()
.removeField(Expressions.bucket("data", 16))
.addField(Expressions.ref("data"))
.commit();
// add a data file partitioned by data
ImmutableList<T> rows3 = ImmutableList.of(toRow(5, "ccc"), toRow(13, "ccc"));
DataFile dataFile3 =
writeData(
writerFactory, fileFactory, rows3, table.spec(), partitionKey(table.spec(), "ccc"));
table.newFastAppend().appendFile(dataFile3).commit();
ClusteredEqualityDeleteWriter<T> writer =
new ClusteredEqualityDeleteWriter<>(
writerFactory, fileFactory, table.io(), TARGET_FILE_SIZE);
PartitionSpec unpartitionedSpec = table.specs().get(0);
PartitionSpec bucketSpec = table.specs().get(1);
PartitionSpec identitySpec = table.specs().get(2);
writer.write(toRow(1, "aaa"), unpartitionedSpec, null);
writer.write(toRow(2, "aaa"), unpartitionedSpec, null);
writer.write(toRow(3, "bbb"), bucketSpec, partitionKey(bucketSpec, "bbb"));
writer.write(toRow(4, "bbb"), bucketSpec, partitionKey(bucketSpec, "bbb"));
writer.write(toRow(5, "ccc"), identitySpec, partitionKey(identitySpec, "ccc"));
writer.close();
DeleteWriteResult result = writer.result();
assertThat(result.deleteFiles()).hasSize(3);
assertThat(writer.result().referencedDataFiles()).isEmpty();
assertThat(writer.result().referencesDataFiles()).isFalse();
RowDelta rowDelta = table.newRowDelta();
result.deleteFiles().forEach(rowDelta::addDeletes);
rowDelta.commit();
List<T> expectedRows = ImmutableList.of(toRow(11, "aaa"), toRow(12, "bbb"), toRow(13, "ccc"));
assertThat(actualRowSet("*")).isEqualTo(toSet(expectedRows));
}
  @TestTemplate
  public void testClusteredEqualityDeleteWriterOutOfOrderSpecsAndPartitions() throws IOException {
    // Revisiting an already-closed partition, or an already-closed spec, must
    // fail with a descriptive IllegalStateException.
    List<Integer> equalityFieldIds = ImmutableList.of(table.schema().findField("id").fieldId());
    Schema equalityDeleteRowSchema = table.schema().select("id");
    FileWriterFactory<T> writerFactory =
        newWriterFactory(table.schema(), equalityFieldIds, equalityDeleteRowSchema);
    table.updateSpec().addField(Expressions.bucket("data", 16)).commit();
    table
        .updateSpec()
        .removeField(Expressions.bucket("data", 16))
        .addField(Expressions.ref("data"))
        .commit();
    ClusteredEqualityDeleteWriter<T> writer =
        new ClusteredEqualityDeleteWriter<>(
            writerFactory, fileFactory, table.io(), TARGET_FILE_SIZE);
    PartitionSpec unpartitionedSpec = table.specs().get(0);
    PartitionSpec bucketSpec = table.specs().get(1);
    PartitionSpec identitySpec = table.specs().get(2);
    writer.write(toRow(1, "aaa"), unpartitionedSpec, null);
    writer.write(toRow(2, "aaa"), unpartitionedSpec, null);
    writer.write(toRow(3, "bbb"), bucketSpec, partitionKey(bucketSpec, "bbb"));
    writer.write(toRow(4, "bbb"), bucketSpec, partitionKey(bucketSpec, "bbb"));
    writer.write(toRow(5, "ccc"), identitySpec, partitionKey(identitySpec, "ccc"));
    writer.write(toRow(6, "ddd"), identitySpec, partitionKey(identitySpec, "ddd"));
    // Same spec, but partition "ccc" was closed when "ddd" began.
    assertThatThrownBy(
            () -> writer.write(toRow(7, "ccc"), identitySpec, partitionKey(identitySpec, "ccc")))
        .isInstanceOf(IllegalStateException.class)
        .hasMessageContaining("Encountered records that belong to already closed files")
        .hasMessageEndingWith("partition 'data=ccc' in spec " + identitySpec);
    // The unpartitioned spec was closed when the bucket spec began.
    assertThatThrownBy(() -> writer.write(toRow(7, "aaa"), unpartitionedSpec, null))
        .isInstanceOf(IllegalStateException.class)
        .hasMessageContaining("Encountered records that belong to already closed files")
        .hasMessageEndingWith("spec []");
    writer.close();
  }
  @TestTemplate
  public void testClusteredPositionDeleteWriterNoRecordsPartitionGranularity() throws IOException {
    checkClusteredPositionDeleteWriterNoRecords(DeleteGranularity.PARTITION);
  }
  @TestTemplate
  public void testClusteredPositionDeleteWriterNoRecordsFileGranularity() throws IOException {
    checkClusteredPositionDeleteWriterNoRecords(DeleteGranularity.FILE);
  }
  // Shared body: an unused position-delete writer must report an empty result
  // regardless of delete granularity, and close() must be idempotent.
  private void checkClusteredPositionDeleteWriterNoRecords(DeleteGranularity deleteGranularity)
      throws IOException {
    FileWriterFactory<T> writerFactory = newWriterFactory(table.schema());
    ClusteredPositionDeleteWriter<T> writer =
        new ClusteredPositionDeleteWriter<>(
            writerFactory, fileFactory, table.io(), TARGET_FILE_SIZE, deleteGranularity);
    writer.close();
    assertThat(writer.result().deleteFiles()).isEmpty();
    assertThat(writer.result().referencedDataFiles()).isEmpty();
    assertThat(writer.result().referencesDataFiles()).isFalse();
    writer.close();
    assertThat(writer.result().deleteFiles()).isEmpty();
    assertThat(writer.result().referencedDataFiles()).isEmpty();
    assertThat(writer.result().referencesDataFiles()).isFalse();
  }
  @TestTemplate
  public void testClusteredPositionDeleteWriterMultipleSpecsPartitionGranularity()
      throws IOException {
    checkClusteredPositionDeleteWriterMultipleSpecs(DeleteGranularity.PARTITION);
  }
  @TestTemplate
  public void testClusteredPositionDeleteWriterMultipleSpecsFileGranularity() throws IOException {
    checkClusteredPositionDeleteWriterMultipleSpecs(DeleteGranularity.FILE);
  }
private void checkClusteredPositionDeleteWriterMultipleSpecs(DeleteGranularity deleteGranularity)
throws IOException {
FileWriterFactory<T> writerFactory = newWriterFactory(table.schema());
// add an unpartitioned data file
ImmutableList<T> rows1 = ImmutableList.of(toRow(1, "aaa"), toRow(2, "aaa"), toRow(11, "aaa"));
DataFile dataFile1 = writeData(writerFactory, fileFactory, rows1, table.spec(), null);
table.newFastAppend().appendFile(dataFile1).commit();
// partition by bucket
table.updateSpec().addField(Expressions.bucket("data", 16)).commit();
// add a data file partitioned by bucket
ImmutableList<T> rows2 = ImmutableList.of(toRow(3, "bbb"), toRow(4, "bbb"), toRow(12, "bbb"));
DataFile dataFile2 =
writeData(
writerFactory, fileFactory, rows2, table.spec(), partitionKey(table.spec(), "bbb"));
table.newFastAppend().appendFile(dataFile2).commit();
// partition by data
table
.updateSpec()
.removeField(Expressions.bucket("data", 16))
.addField(Expressions.ref("data"))
.commit();
// add a data file partitioned by data
ImmutableList<T> rows3 = ImmutableList.of(toRow(5, "ccc"), toRow(13, "ccc"));
DataFile dataFile3 =
writeData(
writerFactory, fileFactory, rows3, table.spec(), partitionKey(table.spec(), "ccc"));
table.newFastAppend().appendFile(dataFile3).commit();
ClusteredPositionDeleteWriter<T> writer =
new ClusteredPositionDeleteWriter<>(
writerFactory, fileFactory, table.io(), TARGET_FILE_SIZE, deleteGranularity);
PartitionSpec unpartitionedSpec = table.specs().get(0);
PartitionSpec bucketSpec = table.specs().get(1);
PartitionSpec identitySpec = table.specs().get(2);
writer.write(positionDelete(dataFile1.location(), 0L, null), unpartitionedSpec, null);
writer.write(positionDelete(dataFile1.location(), 1L, null), unpartitionedSpec, null);
writer.write(
positionDelete(dataFile2.location(), 0L, null),
bucketSpec,
partitionKey(bucketSpec, "bbb"));
writer.write(
positionDelete(dataFile2.location(), 1L, null),
bucketSpec,
partitionKey(bucketSpec, "bbb"));
writer.write(
positionDelete(dataFile3.location(), 0L, null),
identitySpec,
partitionKey(identitySpec, "ccc"));
writer.close();
DeleteWriteResult result = writer.result();
assertThat(writer.result().deleteFiles()).hasSize(3);
assertThat(writer.result().referencedDataFiles()).hasSize(3);
assertThat(writer.result().referencesDataFiles()).isTrue();
RowDelta rowDelta = table.newRowDelta();
result.deleteFiles().forEach(rowDelta::addDeletes);
rowDelta.commit();
List<T> expectedRows = ImmutableList.of(toRow(11, "aaa"), toRow(12, "bbb"), toRow(13, "ccc"));
assertThat(actualRowSet("*")).isEqualTo(toSet(expectedRows));
}
  @TestTemplate
  public void testClusteredPositionDeleteWriterOutOfOrderSpecsAndPartitionsPartitionGranularity()
      throws IOException {
    checkClusteredPositionDeleteWriterOutOfOrderSpecsAndPartitions(DeleteGranularity.PARTITION);
  }
  @TestTemplate
  public void testClusteredPositionDeleteWriterOutOfOrderSpecsAndPartitionsFileGranularity()
      throws IOException {
    checkClusteredPositionDeleteWriterOutOfOrderSpecsAndPartitions(DeleteGranularity.FILE);
  }
  // Shared body: revisiting a closed partition or a closed spec with position
  // deletes must fail fast with a descriptive IllegalStateException.
  private void checkClusteredPositionDeleteWriterOutOfOrderSpecsAndPartitions(
      DeleteGranularity deleteGranularity) throws IOException {
    FileWriterFactory<T> writerFactory = newWriterFactory(table.schema());
    table.updateSpec().addField(Expressions.bucket("data", 16)).commit();
    table
        .updateSpec()
        .removeField(Expressions.bucket("data", 16))
        .addField(Expressions.ref("data"))
        .commit();
    ClusteredPositionDeleteWriter<T> writer =
        new ClusteredPositionDeleteWriter<>(
            writerFactory, fileFactory, table.io(), TARGET_FILE_SIZE, deleteGranularity);
    PartitionSpec unpartitionedSpec = table.specs().get(0);
    PartitionSpec bucketSpec = table.specs().get(1);
    PartitionSpec identitySpec = table.specs().get(2);
    // No real data files needed here: only the clustering order is under test.
    writer.write(positionDelete("file-1.parquet", 0L, null), unpartitionedSpec, null);
    writer.write(positionDelete("file-1.parquet", 1L, null), unpartitionedSpec, null);
    writer.write(
        positionDelete("file-2.parquet", 0L, null), bucketSpec, partitionKey(bucketSpec, "bbb"));
    writer.write(
        positionDelete("file-2.parquet", 1L, null), bucketSpec, partitionKey(bucketSpec, "bbb"));
    writer.write(
        positionDelete("file-3.parquet", 0L, null),
        identitySpec,
        partitionKey(identitySpec, "ccc"));
    writer.write(
        positionDelete("file-4.parquet", 0L, null),
        identitySpec,
        partitionKey(identitySpec, "ddd"));
    assertThatThrownBy(
            () -> {
              PositionDelete<T> positionDelete = positionDelete("file-5.parquet", 1L, null);
              writer.write(positionDelete, identitySpec, partitionKey(identitySpec, "ccc"));
            })
        .isInstanceOf(IllegalStateException.class)
        .hasMessageContaining("Encountered records that belong to already closed files")
        .hasMessageEndingWith("partition 'data=ccc' in spec " + identitySpec);
    assertThatThrownBy(
            () -> {
              PositionDelete<T> positionDelete = positionDelete("file-1.parquet", 3L, null);
              writer.write(positionDelete, unpartitionedSpec, null);
            })
        .isInstanceOf(IllegalStateException.class)
        .hasMessageContaining("Encountered records that belong to already closed files")
        .hasMessageEndingWith("spec []");
    writer.close();
  }
  @TestTemplate
  public void testClusteredPositionDeleteWriterPartitionGranularity() throws IOException {
    checkClusteredPositionDeleteWriterGranularity(DeleteGranularity.PARTITION);
  }
  @TestTemplate
  public void testClusteredPositionDeleteWriterFileGranularity() throws IOException {
    checkClusteredPositionDeleteWriterGranularity(DeleteGranularity.FILE);
  }
  // Shared body: deletes spanning two data files in the same (unpartitioned)
  // scope produce 2 delete files at FILE granularity but only 1 at PARTITION.
  private void checkClusteredPositionDeleteWriterGranularity(DeleteGranularity deleteGranularity)
      throws IOException {
    FileWriterFactory<T> writerFactory = newWriterFactory(table.schema());
    // add the first data file
    List<T> rows1 = ImmutableList.of(toRow(1, "aaa"), toRow(2, "aaa"), toRow(11, "aaa"));
    DataFile dataFile1 = writeData(writerFactory, fileFactory, rows1, table.spec(), null);
    table.newFastAppend().appendFile(dataFile1).commit();
    // add the second data file
    List<T> rows2 = ImmutableList.of(toRow(3, "aaa"), toRow(4, "aaa"), toRow(12, "aaa"));
    DataFile dataFile2 = writeData(writerFactory, fileFactory, rows2, table.spec(), null);
    table.newFastAppend().appendFile(dataFile2).commit();
    // init the delete writer
    ClusteredPositionDeleteWriter<T> writer =
        new ClusteredPositionDeleteWriter<>(
            writerFactory, fileFactory, table.io(), TARGET_FILE_SIZE, deleteGranularity);
    PartitionSpec spec = table.spec();
    // write deletes for both data files
    writer.write(positionDelete(dataFile1.location(), 0L, null), spec, null);
    writer.write(positionDelete(dataFile1.location(), 1L, null), spec, null);
    writer.write(positionDelete(dataFile2.location(), 0L, null), spec, null);
    writer.write(positionDelete(dataFile2.location(), 1L, null), spec, null);
    writer.close();
    // verify the writer result
    DeleteWriteResult result = writer.result();
    int expectedNumDeleteFiles = deleteGranularity == DeleteGranularity.FILE ? 2 : 1;
    assertThat(result.deleteFiles()).hasSize(expectedNumDeleteFiles);
    assertThat(result.referencedDataFiles()).hasSize(2);
    assertThat(result.referencesDataFiles()).isTrue();
    // commit the deletes
    RowDelta rowDelta = table.newRowDelta();
    result.deleteFiles().forEach(rowDelta::addDeletes);
    rowDelta.commit();
    // verify correctness
    List<T> expectedRows = ImmutableList.of(toRow(11, "aaa"), toRow(12, "aaa"));
    assertThat(actualRowSet("*")).isEqualTo(toSet(expectedRows));
  }
  @TestTemplate
  public void testFanoutDataWriterNoRecords() throws IOException {
    // An unused fanout writer must report an empty result; close() is idempotent.
    FileWriterFactory<T> writerFactory = newWriterFactory(table.schema());
    FanoutDataWriter<T> writer =
        new FanoutDataWriter<>(writerFactory, fileFactory, table.io(), TARGET_FILE_SIZE);
    writer.close();
    assertThat(writer.result().dataFiles()).isEmpty();
    writer.close();
    assertThat(writer.result().dataFiles()).isEmpty();
  }
  @TestTemplate
  public void testFanoutDataWriterMultiplePartitions() throws IOException {
    // Unlike the clustered writer, fanout accepts partitions in any order
    // (note rows are interleaved below) and still yields one file per partition.
    table.updateSpec().addField(Expressions.ref("data")).commit();
    FileWriterFactory<T> writerFactory = newWriterFactory(table.schema());
    FanoutDataWriter<T> writer =
        new FanoutDataWriter<>(writerFactory, fileFactory, table.io(), TARGET_FILE_SIZE);
    PartitionSpec spec = table.spec();
    writer.write(toRow(1, "aaa"), spec, partitionKey(spec, "aaa"));
    writer.write(toRow(3, "bbb"), spec, partitionKey(spec, "bbb"));
    writer.write(toRow(2, "aaa"), spec, partitionKey(spec, "aaa"));
    writer.write(toRow(4, "bbb"), spec, partitionKey(spec, "bbb"));
    writer.write(toRow(5, "ccc"), spec, partitionKey(spec, "ccc"));
    writer.close();
    DataWriteResult result = writer.result();
    assertThat(result.dataFiles()).hasSize(3);
    RowDelta rowDelta = table.newRowDelta();
    result.dataFiles().forEach(rowDelta::addRows);
    rowDelta.commit();
    List<T> expectedRows =
        ImmutableList.of(
            toRow(1, "aaa"), toRow(2, "aaa"), toRow(3, "bbb"), toRow(4, "bbb"), toRow(5, "ccc"));
    assertThat(actualRowSet("*")).isEqualTo(toSet(expectedRows));
  }
@TestTemplate
public void testFanoutPositionOnlyDeleteWriterNoRecordsPartitionGranularity() throws IOException {
checkFanoutPositionOnlyDeleteWriterNoRecords(DeleteGranularity.PARTITION);
}
@TestTemplate
public void testFanoutPositionOnlyDeleteWriterNoRecordsFileGranularity() throws IOException {
checkFanoutPositionOnlyDeleteWriterNoRecords(DeleteGranularity.FILE);
}
/**
 * Verifies that a position-delete writer closed without receiving any deletes reports an empty
 * result, and that repeated close() calls are idempotent.
 */
private void checkFanoutPositionOnlyDeleteWriterNoRecords(DeleteGranularity deleteGranularity)
    throws IOException {
  FileWriterFactory<T> writerFactory = newWriterFactory(table.schema());
  FanoutPositionOnlyDeleteWriter<T> writer =
      new FanoutPositionOnlyDeleteWriter<>(
          writerFactory, fileFactory, table.io(), TARGET_FILE_SIZE, deleteGranularity);
  writer.close();
  assertThat(writer.result().deleteFiles()).isEmpty();
  assertThat(writer.result().referencedDataFiles()).isEmpty();
  assertThat(writer.result().referencesDataFiles()).isFalse();
  // A second close() must not change the reported result.
  writer.close();
  assertThat(writer.result().deleteFiles()).isEmpty();
  assertThat(writer.result().referencedDataFiles()).isEmpty();
  assertThat(writer.result().referencesDataFiles()).isFalse();
}
@TestTemplate
public void testFanoutPositionOnlyDeleteWriterOutOfOrderRecordsPartitionGranularity()
    throws IOException {
  // Delegates to the shared check using partition-level delete granularity.
  checkFanoutPositionOnlyDeleteWriterOutOfOrderRecords(DeleteGranularity.PARTITION);
}
@TestTemplate
public void testFanoutPositionOnlyDeleteWriterOutOfOrderRecordsFileGranularity()
    throws IOException {
  // Delegates to the shared check using file-level delete granularity.
  checkFanoutPositionOnlyDeleteWriterOutOfOrderRecords(DeleteGranularity.FILE);
}
/**
 * Verifies that a fanout position-delete writer handles out-of-order deletes spread across three
 * different partition specs (unpartitioned, bucket, identity) and produces one delete file per
 * spec/partition target.
 */
private void checkFanoutPositionOnlyDeleteWriterOutOfOrderRecords(
    DeleteGranularity deleteGranularity) throws IOException {
  FileWriterFactory<T> writerFactory = newWriterFactory(table.schema());
  // add an unpartitioned data file
  ImmutableList<T> rows1 = ImmutableList.of(toRow(1, "aaa"), toRow(2, "aaa"), toRow(11, "aaa"));
  DataFile dataFile1 = writeData(writerFactory, fileFactory, rows1, table.spec(), null);
  table.newFastAppend().appendFile(dataFile1).commit();
  // partition by bucket
  table.updateSpec().addField(Expressions.bucket("data", 16)).commit();
  // add a data file partitioned by bucket
  ImmutableList<T> rows2 = ImmutableList.of(toRow(3, "bbb"), toRow(4, "bbb"), toRow(12, "bbb"));
  DataFile dataFile2 =
      writeData(
          writerFactory, fileFactory, rows2, table.spec(), partitionKey(table.spec(), "bbb"));
  table.newFastAppend().appendFile(dataFile2).commit();
  // partition by data
  table
      .updateSpec()
      .removeField(Expressions.bucket("data", 16))
      .addField(Expressions.ref("data"))
      .commit();
  // add a data file partitioned by data
  ImmutableList<T> rows3 = ImmutableList.of(toRow(5, "ccc"), toRow(13, "ccc"));
  DataFile dataFile3 =
      writeData(
          writerFactory, fileFactory, rows3, table.spec(), partitionKey(table.spec(), "ccc"));
  table.newFastAppend().appendFile(dataFile3).commit();
  FanoutPositionOnlyDeleteWriter<T> writer =
      new FanoutPositionOnlyDeleteWriter<>(
          writerFactory, fileFactory, table.io(), TARGET_FILE_SIZE, deleteGranularity);
  PartitionSpec unpartitionedSpec = table.specs().get(0);
  PartitionSpec bucketSpec = table.specs().get(1);
  PartitionSpec identitySpec = table.specs().get(2);
  // Write deletes interleaved across the three specs; a fanout writer must accept any order.
  writer.write(positionDelete(dataFile1.location(), 1L, null), unpartitionedSpec, null);
  writer.write(
      positionDelete(dataFile2.location(), 1L, null),
      bucketSpec,
      partitionKey(bucketSpec, "bbb"));
  writer.write(
      positionDelete(dataFile2.location(), 0L, null),
      bucketSpec,
      partitionKey(bucketSpec, "bbb"));
  writer.write(
      positionDelete(dataFile3.location(), 1L, null),
      identitySpec,
      partitionKey(identitySpec, "ccc"));
  writer.write(
      positionDelete(dataFile3.location(), 2L, null),
      identitySpec,
      partitionKey(identitySpec, "ccc"));
  writer.write(positionDelete(dataFile1.location(), 0L, null), unpartitionedSpec, null);
  writer.write(
      positionDelete(dataFile3.location(), 0L, null),
      identitySpec,
      partitionKey(identitySpec, "ccc"));
  writer.write(positionDelete(dataFile1.location(), 2L, null), unpartitionedSpec, null);
  writer.close();
  // Use the captured result consistently instead of re-querying writer.result() per assertion.
  DeleteWriteResult result = writer.result();
  assertThat(result.deleteFiles()).hasSize(3);
  assertThat(result.referencedDataFiles()).hasSize(3);
  assertThat(result.referencesDataFiles()).isTrue();
  RowDelta rowDelta = table.newRowDelta();
  result.deleteFiles().forEach(rowDelta::addDeletes);
  rowDelta.commit();
  // Only row 12 survives: every other position was deleted above.
  List<T> expectedRows = ImmutableList.of(toRow(12, "bbb"));
  assertThat(actualRowSet("*")).isEqualTo(toSet(expectedRows));
}
@TestTemplate
public void testFanoutPositionOnlyDeleteWriterPartitionGranularity() throws IOException {
  // Delegates to the shared check using partition-level delete granularity.
  checkFanoutPositionOnlyDeleteWriterGranularity(DeleteGranularity.PARTITION);
}
@TestTemplate
public void testFanoutPositionOnlyDeleteWriterFileGranularity() throws IOException {
  // Delegates to the shared check using file-level delete granularity.
  checkFanoutPositionOnlyDeleteWriterGranularity(DeleteGranularity.FILE);
}
/**
 * Verifies delete-file clustering for an unpartitioned table with two data files: FILE
 * granularity yields one delete file per data file (2), PARTITION granularity collapses them
 * into a single delete file.
 */
private void checkFanoutPositionOnlyDeleteWriterGranularity(DeleteGranularity deleteGranularity)
    throws IOException {
  FileWriterFactory<T> writerFactory = newWriterFactory(table.schema());
  // add the first data file
  List<T> rows1 = ImmutableList.of(toRow(1, "aaa"), toRow(2, "aaa"), toRow(11, "aaa"));
  DataFile dataFile1 = writeData(writerFactory, fileFactory, rows1, table.spec(), null);
  table.newFastAppend().appendFile(dataFile1).commit();
  // add the second data file
  List<T> rows2 = ImmutableList.of(toRow(3, "aaa"), toRow(4, "aaa"), toRow(12, "aaa"));
  DataFile dataFile2 = writeData(writerFactory, fileFactory, rows2, table.spec(), null);
  table.newFastAppend().appendFile(dataFile2).commit();
  // init the delete writer
  FanoutPositionOnlyDeleteWriter<T> writer =
      new FanoutPositionOnlyDeleteWriter<>(
          writerFactory, fileFactory, table.io(), TARGET_FILE_SIZE, deleteGranularity);
  PartitionSpec spec = table.spec();
  // write deletes for both data files (the order of records is mixed)
  writer.write(positionDelete(dataFile1.location(), 1L, null), spec, null);
  writer.write(positionDelete(dataFile2.location(), 0L, null), spec, null);
  writer.write(positionDelete(dataFile1.location(), 0L, null), spec, null);
  writer.write(positionDelete(dataFile2.location(), 1L, null), spec, null);
  writer.close();
  // verify the writer result
  DeleteWriteResult result = writer.result();
  // FILE granularity: one delete file per referenced data file; PARTITION: one per partition.
  int expectedNumDeleteFiles = deleteGranularity == DeleteGranularity.FILE ? 2 : 1;
  assertThat(result.deleteFiles()).hasSize(expectedNumDeleteFiles);
  assertThat(result.referencedDataFiles()).hasSize(2);
  assertThat(result.referencesDataFiles()).isTrue();
  // commit the deletes
  RowDelta rowDelta = table.newRowDelta();
  result.deleteFiles().forEach(rowDelta::addDeletes);
  rowDelta.commit();
  // verify correctness
  List<T> expectedRows = ImmutableList.of(toRow(11, "aaa"), toRow(12, "aaa"));
  assertThat(actualRowSet("*")).isEqualTo(toSet(expectedRows));
}
/**
 * Verifies that a delete writer configured with a previous-delete loader merges new deletes into
 * the previously committed file-scoped delete files and reports those old files as rewritten, so
 * the commit can replace them instead of stacking additional delete files.
 */
@TestTemplate
public void testRewriteOfPreviousDeletes() throws IOException {
  // Rewrite support is only exercised for Parquet and ORC here.
  assumeThat(format()).isIn(FileFormat.PARQUET, FileFormat.ORC);
  FileWriterFactory<T> writerFactory = newWriterFactory(table.schema());
  // add the first data file
  List<T> rows1 = ImmutableList.of(toRow(1, "aaa"), toRow(2, "aaa"), toRow(11, "aaa"));
  DataFile dataFile1 = writeData(writerFactory, fileFactory, rows1, table.spec(), null);
  table.newFastAppend().appendFile(dataFile1).commit();
  // add the second data file
  List<T> rows2 = ImmutableList.of(toRow(3, "aaa"), toRow(4, "aaa"), toRow(12, "aaa"));
  DataFile dataFile2 = writeData(writerFactory, fileFactory, rows2, table.spec(), null);
  table.newFastAppend().appendFile(dataFile2).commit();
  PartitionSpec spec = table.spec();
  // init the first delete writer without access to previous deletes
  FanoutPositionOnlyDeleteWriter<T> writer1 =
      new FanoutPositionOnlyDeleteWriter<>(
          writerFactory, fileFactory, table.io(), TARGET_FILE_SIZE, DeleteGranularity.FILE);
  // write initial deletes for both data files
  writer1.write(positionDelete(dataFile1.location(), 1L), spec, null);
  writer1.write(positionDelete(dataFile2.location(), 1L), spec, null);
  writer1.close();
  // verify the writer result: nothing is rewritten yet
  DeleteWriteResult result1 = writer1.result();
  assertThat(result1.deleteFiles()).hasSize(2);
  assertThat(result1.referencedDataFiles()).hasSize(2);
  assertThat(result1.referencesDataFiles()).isTrue();
  assertThat(result1.rewrittenDeleteFiles()).isEmpty();
  // commit the initial deletes
  RowDelta rowDelta1 = table.newRowDelta();
  result1.deleteFiles().forEach(rowDelta1::addDeletes);
  rowDelta1.commit();
  // verify correctness of the first delete operation
  List<T> expectedRows1 =
      ImmutableList.of(toRow(1, "aaa"), toRow(3, "aaa"), toRow(11, "aaa"), toRow(12, "aaa"));
  assertThat(actualRowSet("*")).isEqualTo(toSet(expectedRows1));
  // populate previous delete mapping, keyed by the referenced data file location
  Map<String, DeleteFile> previousDeletes = Maps.newHashMap();
  for (DeleteFile deleteFile : result1.deleteFiles()) {
    String dataLocation = ContentFileUtil.referencedDataFile(deleteFile).toString();
    previousDeletes.put(dataLocation, deleteFile);
  }
  // init the second delete writer with access to previous deletes
  FanoutPositionOnlyDeleteWriter<T> writer2 =
      new FanoutPositionOnlyDeleteWriter<>(
          writerFactory,
          fileFactory,
          table.io(),
          TARGET_FILE_SIZE,
          DeleteGranularity.FILE,
          new PreviousDeleteLoader(table, previousDeletes));
  // write more deletes for both data files
  writer2.write(positionDelete(dataFile1.location(), 0L), spec, null);
  writer2.write(positionDelete(dataFile2.location(), 0L), spec, null);
  writer2.close();
  // verify the writer result: both original delete files must be reported as rewritten
  DeleteWriteResult result2 = writer2.result();
  assertThat(result2.deleteFiles()).hasSize(2);
  assertThat(result2.referencedDataFiles()).hasSize(2);
  assertThat(result2.referencesDataFiles()).isTrue();
  assertThat(result2.rewrittenDeleteFiles()).hasSize(2);
  // add new and remove rewritten delete files
  RowDelta rowDelta2 = table.newRowDelta();
  result2.deleteFiles().forEach(rowDelta2::addDeletes);
  result2.rewrittenDeleteFiles().forEach(rowDelta2::removeDeletes);
  rowDelta2.commit();
  // verify correctness of the second delete operation
  List<T> expectedRows2 = ImmutableList.of(toRow(11, "aaa"), toRow(12, "aaa"));
  assertThat(actualRowSet("*")).isEqualTo(toSet(expectedRows2));
}
/**
 * Loads previously committed position deletes for a data file, keyed by the data file location
 * that each delete file references.
 */
private static class PreviousDeleteLoader implements Function<CharSequence, PositionDeleteIndex> {
  // Delete files keyed by the referenced data file location (String keys).
  private final Map<String, DeleteFile> deleteFiles;
  private final DeleteLoader deleteLoader;

  PreviousDeleteLoader(Table table, Map<String, DeleteFile> deleteFiles) {
    this.deleteFiles = deleteFiles;
    this.deleteLoader = new BaseDeleteLoader(deleteFile -> table.io().newInputFile(deleteFile));
  }

  @Override
  public PositionDeleteIndex apply(CharSequence path) {
    // The map is keyed by String; a non-String CharSequence would never match via equals(),
    // so normalize the lookup key explicitly before the map access.
    DeleteFile deleteFile = deleteFiles.get(path.toString());
    return deleteLoader.loadPositionDeletes(ImmutableList.of(deleteFile), path);
  }
}
}
|
google/closure-templates | 35,043 | java/src/com/google/template/soy/parsepasses/contextautoesc/RawTextContextUpdater.java | /*
* Copyright 2010 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.template.soy.parsepasses.contextautoesc;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import com.google.auto.value.AutoValue;
import com.google.common.base.MoreObjects;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.template.soy.internal.base.UnescapeUtils;
import com.google.template.soy.parsepasses.contextautoesc.Context.AttributeEndDelimiter;
import com.google.template.soy.parsepasses.contextautoesc.Context.HtmlHtmlAttributePosition;
import com.google.template.soy.parsepasses.contextautoesc.Context.UriPart;
import com.google.template.soy.parsepasses.contextautoesc.Context.UriType;
import com.google.template.soy.soytree.HtmlContext;
import com.google.template.soy.soytree.RawTextNode;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.annotation.Nullable;
/**
* Propagates {@link Context}s across raw text chunks using a state-machine parser for HTML/CSS/JS.
*
* <p>Given some raw JS text {@code var x = "foo";} and the {@link Context#JS JS} starting context,
* this class will decompose the rawText into a number of tokens and compute follow on contexts for
* each.
*
* <table>
* <tr><td>{@code var x = "}</td><td>{@link HtmlContext#JS}</td></tr>
* <tr><td>{@code foo}</td><td>{@link HtmlContext#JS_DQ_STRING}</td></tr>
* <tr><td>{@code ";}</td><td>{@link HtmlContext#JS}</td></tr>
* </table>
*
 * <h2>A note on regular expressions</h2>
*
* <p>This class uses a number of regular expressions to detect context transition boundaries and it
* uses the Java builtin regex engine. This is a backtracking regex engine and so has the
* possibility of failing with stack overflow errors on large inputs. This is normally triggered by
* the following:
*
* <ul>
* <li>A regex containing a repeated alternation e.g. {@code (A|B)+}
* <li>A large input string, that matches with many repetitions.
* </ul>
*
* <p>To cope with this you can do a few things
*
* <ul>
* <li>Move repetition inside the alternation where possible e.g. {@code (A+|B+)+}
 *   <li>Make the repetition quantifiers possessive e.g. {@code (A|B)++}. This causes the engine to
* 'commit' to a choice and thus avoid recursion.
* </ul>
*
* <p>The other option would be to switch to a different regex engine less prone to these issues
* like RE2. However, there are some downsides
*
* <ul>
* <li>The java implementations are not as performant or require the use of native libraries.
* <li>It would add a new open source dependency.
* </ul>
*
* <p>So, for the time being we should just be careful.
*/
final class RawTextContextUpdater {
/**
 * Computes the context after a chunk of raw HTML/CSS/JS text.
 *
 * @param rawTextNode A chunk of HTML/CSS/JS.
 * @param context The context before rawText.
 * @return the next context transition.
 */
public static Context processRawText(RawTextNode rawTextNode, Context context) {
  String rawText = rawTextNode.getRawText();
  // If we are in an attribute value, decode the text first. The html parser guarantees this
  // text cannot exceed the bounds of the attribute, so the whole chunk can be unescaped.
  if (context.delimType() != AttributeEndDelimiter.NONE) {
    rawText = UnescapeUtils.unescapeHtml(rawText);
  }
  RawTextContextUpdater updater = new RawTextContextUpdater(context);
  // Consume the text token by token, advancing by however many characters each step used.
  for (int pos = 0; pos < rawText.length(); ) {
    pos += updater.processNextToken(rawTextNode, pos, rawText.substring(pos));
  }
  return updater.context;
}
// The current context; advanced as each token of raw text is consumed.
private Context context;

private RawTextContextUpdater(Context context) {
  this.context = checkNotNull(context);
}
/**
 * Consume a portion of text and compute the next context. Output is stored in member variables.
 *
 * @param node The node currently being processed
 * @param offset The offset into the node where text starts
 * @param text Non empty.
 * @return the number of characters consumed
 */
private int processNextToken(RawTextNode node, int offset, String text) {
  // Find the transition whose pattern matches earliest in the raw text (and is applicable)
  Processor processor = TRANSITIONS.get(context.state());
  if (processor == null) {
    // Every state reachable here should be registered in TRANSITIONS; a miss is a programming
    // error, so fail with the state and source location to aid debugging.
    throw new NullPointerException(
        "no transitions for state: "
            + context.state()
            + " @"
            + node.substringLocation(offset, offset + 1));
  }
  Processor.Result result = processor.processText(context, node, offset, text);
  this.context = result.nextContext();
  return result.numCharactersConsumed();
}
/** Consumes a prefix of a raw text chunk and computes the context after it. */
interface Processor {
  /** The outcome of one processing step: the next context and how many characters it consumed. */
  @AutoValue
  abstract class Result {
    static Result create(Context next, int numCharactersConsumed) {
      return new AutoValue_RawTextContextUpdater_Processor_Result(next, numCharactersConsumed);
    }

    abstract Context nextContext();

    abstract int numCharactersConsumed();
  }

  Result processText(Context context, RawTextNode node, int offset, String text);
}
/**
 * A {@link Processor} backed by a set of {@link Transition}s; the applicable transition whose
 * match starts earliest in the text wins.
 */
private static final class TransitionSetProcessor implements Processor {
  static TransitionSetProcessor of(Transition... transitions) {
    return new TransitionSetProcessor(ImmutableList.copyOf(transitions));
  }

  final ImmutableList<Transition> transitions;

  TransitionSetProcessor(ImmutableList<Transition> transitions) {
    this.transitions = transitions;
  }

  @Override
  public Result processText(Context context, RawTextNode node, int offset, String text) {
    // Find the transition whose pattern matches earliest in the raw text (and is applicable)
    int numCharsConsumed;
    Context next;
    int earliestStart = Integer.MAX_VALUE;
    int earliestEnd = -1;
    Transition earliestTransition = null;
    Matcher earliestMatcher = null;
    for (Transition transition : transitions) {
      if (transition.pattern != null) {
        Matcher matcher = transition.pattern.matcher(text);
        // Ideally we would use Matcher.region to limit the scope of text being examined so we
        // don't find matches past the current earliest end. However, since some transitions are
        // triggered by multicharacter regexes, arbitrarily limiting the range might prevent
        // legitimate matches. Switching to fully lexer style matches is the most performant
        // solution.
        // For each transition:
        // look for matches, if the match is later than the current earliest match, give up
        // otherwise if the match is applicable, store it.
        // NOTE: matcher.find() returns matches in sequential order.
        try {
          while (matcher.find() && matcher.start() < earliestStart) {
            int start = matcher.start();
            int end = matcher.end();
            if (transition.isApplicableTo(context, matcher)) {
              earliestStart = start;
              earliestEnd = end;
              earliestTransition = transition;
              earliestMatcher = matcher;
              break;
            }
          }
        } catch (StackOverflowError soe) {
          // catch and annotate with the pattern.
          throw new RuntimeException(
              String.format(
                  "StackOverflow while trying to match: '%s' in context %s starting @ %s",
                  transition.pattern, context, node.substringLocation(offset, offset + 1)),
              soe);
        }
      } else if (transition.literal != null) {
        // Literal transitions: only search the prefix where a hit could still beat the current
        // earliest match.
        String needle = transition.literal;
        String range =
            earliestStart != Integer.MAX_VALUE
                ? text.substring(0, Math.min(earliestStart + needle.length() - 1, text.length()))
                : text;
        int index = range.indexOf(needle);
        if (index != -1 && transition.isApplicableTo(context, null)) {
          checkState(index < earliestStart);
          earliestStart = index;
          earliestEnd = index + needle.length();
          earliestTransition = transition;
          earliestMatcher = null;
        }
      } else {
        // The end-of-input 'self transition' matches at text.length(): lowest precedence.
        if (text.length() < earliestStart && transition.isApplicableTo(context, null)) {
          earliestStart = text.length();
          earliestEnd = text.length();
          earliestTransition = transition;
          earliestMatcher = null;
        }
      }
    }
    if (earliestTransition != null) {
      int transitionOffset = offset;
      // the earliest start might be at the end for null transitions.
      if (earliestStart < text.length()) {
        transitionOffset += earliestStart;
      }
      next =
          earliestTransition.computeNextContext(node, transitionOffset, context, earliestMatcher);
      numCharsConsumed = earliestEnd;
    } else {
      throw SoyAutoescapeException.createWithNode(
          "Error determining next state when encountering \"" + text + "\" in " + context,
          // calculate a raw text node that points at the beginning of the string that couldn't
          // be matched.
          node.substring(Integer.MAX_VALUE /* bogus id */, offset));
    }
    if (numCharsConsumed == 0 && next.state() == context.state()) {
      throw new IllegalStateException("Infinite loop at `" + text + "` / " + context);
    }
    return Result.create(next, numCharsConsumed);
  }
}
/**
 * Encapsulates a grammar production and the context after that production is seen in a chunk of
 * HTML/CSS/JS input.
 */
private abstract static class Transition {
  // If both fields are null, then this is a special 'self transition' that matches the end of
  // input. This is used to create a base case in matching multiple transitions.

  /** Matches a pattern. */
  @Nullable final Pattern pattern;

  /** For matching a literal string. */
  @Nullable final String literal;

  Transition(Pattern pattern) {
    this.pattern = pattern;
    this.literal = null;
  }

  Transition(String literal) {
    this.pattern = null;
    this.literal = literal;
  }

  /** Creates the special end-of-input 'self transition' (both fields null). */
  Transition() {
    this.pattern = null;
    this.literal = null;
  }

  /**
   * True iff this transition can produce a context after the text in rawText[0:matcher.end()].
   * This should not destructively modify the matcher. Specifically, it should not call {@code
   * find()} again.
   *
   * @param prior The context before the start of the token in matcher.
   * @param matcher The token matched by {@code this.pattern} or {@code null} if this transition
   *     uses a {@code literal}
   */
  boolean isApplicableTo(Context prior, @Nullable Matcher matcher) {
    return true;
  }

  /**
   * Computes the context that this production transitions to after rawText[0:matcher.end()].
   *
   * @param originalNode The original raw text node
   * @param offset The current offset into the node, useful for calculating better locations for
   *     error messages
   * @param prior The context prior to the token in matcher.
   * @param matcher The token matched by {@code this.pattern} or {@code null} if this transition
   *     uses a {@code literal}
   * @return The context after the given token.
   */
  Context computeNextContext(
      RawTextNode originalNode, int offset, Context prior, @Nullable Matcher matcher) {
    return computeNextContext(prior, matcher);
  }

  /**
   * Computes the context that this production transitions to after rawText[0:matcher.end()].
   *
   * <p>Subclasses must override either this overload or the node-aware overload above; otherwise
   * this deliberately fails.
   *
   * @param prior The context prior to the token in matcher.
   * @param matcher The token matched by {@code this.pattern} or {@code null} if this transition
   *     uses a {@code literal}
   * @return The context after the given token.
   */
  Context computeNextContext(Context prior, @Nullable Matcher matcher) {
    throw new AbstractMethodError();
  }

  @Override
  public String toString() {
    return MoreObjects.toStringHelper(this)
        .add("pattern", pattern)
        .add("literal", literal)
        .omitNullValues()
        .toString();
  }
}
/** A transition on a literal string to the given state. */
private static Transition makeTransitionToStateLiteral(String literal, HtmlContext state) {
  return new Transition(literal) {
    @Override
    Context computeNextContext(Context prior, Matcher matcher) {
      return prior.transitionToState(state);
    }
  };
}
/** A transition on a regex match to the given state. */
private static Transition makeTransitionToState(Pattern regex, HtmlContext state) {
  return new Transition(regex) {
    @Override
    Context computeNextContext(Context prior, Matcher matcher) {
      return prior.transitionToState(state);
    }
  };
}
/** A transition that, when matched, raises an autoescape error with the given message. */
private static Transition makeTransitionToError(Pattern regex, String message) {
  return new Transition(regex) {
    @Override
    Context computeNextContext(RawTextNode node, int offset, Context prior, Matcher matcher) {
      throw SoyAutoescapeException.createWithNode(
          message, node.substring(Integer.MAX_VALUE, offset));
    }
  };
}
/** A transition that consumes some content without changing state (a self transition). */
private static Transition makeTransitionToSelf(Pattern regex) {
  return new Transition(regex) {
    @Override
    Context computeNextContext(Context prior, Matcher matcher) {
      return prior;
    }
  };
}
/**
 * Consumes the entire content without change if nothing else matched.
 *
 * <p>Uses the no-arg (end-of-input) constructor, so it only matches at the end of the text.
 */
private static final Transition TRANSITION_TO_SELF =
    new Transition() {
      @Override
      Context computeNextContext(Context prior, Matcher matcher) {
        return prior;
      }
    };

// Matching at the end is lowest possible precedence.
/**
 * Computes the next {@link UriPart} of an http-like URI upon seeing a significant URI character
 * (one matched by {@code URI_PART_TRANSITION}: {@code : . / & ? = #}).
 *
 * @param node the node being processed, used for error locations
 * @param offset the offset into the node, used for error locations
 * @param uriPart the URI part before {@code matchChar}
 * @param matchChar the significant character just seen
 * @return the URI part after {@code matchChar}
 */
private static UriPart getNextUriPart(
    RawTextNode node, int offset, UriPart uriPart, char matchChar) {
  // This switch statement is designed to process a URI in order via a sequence of fall throughs.
  switch (uriPart) {
    case MAYBE_SCHEME:
    case MAYBE_VARIABLE_SCHEME:
      // From the RFC: https://tools.ietf.org/html/rfc3986#section-3.1
      // scheme = ALPHA *( ALPHA / DIGIT / "+" / "-" / "." )
      // At this point, our goal is to try to prove that we've safely left the scheme, and then
      // transition to a more specific state.
      if (matchChar == ':') {
        // Ah, it looks like we might be able to conclude we've set the scheme, but...
        if (uriPart == UriPart.MAYBE_VARIABLE_SCHEME) {
          // At the start of a URL, and we already saw a print statement, and now we suddenly
          // see a colon. While this could be relatively safe if it's a {$host}:{$port} pair,
          // at compile-time, we can't be sure that "$host" isn't something like "javascript"
          // and "$port" isn't "deleteMyAccount()".
          throw SoyAutoescapeException.createWithNode(
              "Soy can't safely process a URI that might start with a variable scheme. "
                  + "For example, {$x}:{$y} could have an XSS if $x is 'javascript' and $y is "
                  + "attacker-controlled. Either use a hard-coded scheme, or introduce "
                  + "disambiguating characters (e.g. http://{$x}:{$y}, ./{$x}:{$y}, or "
                  + "{$x}?foo=:{$y})",
              node.substring(/* newId= */ Integer.MAX_VALUE, offset));
        } else {
          // At the start of the URL, and we just saw some hard-coded characters and a colon,
          // like http:. This is safe (assuming it's a good scheme), and now we're on our way to
          // the authority. Note if javascript: was seen, we would have scanned it already and
          // entered a separate state (unless the developer is malicious and tries to obscure it
          // via a conditional).
          return UriPart.AUTHORITY_OR_PATH;
        }
      }
      if (matchChar == '/') {
        // Upon seeing a slash, it's impossible to set a valid scheme anymore. Either we're in the
        // path, or we're starting a protocol-relative URI. (For all we know, we *could* be
        // in the query, e.g. {$base}/foo if $base has a question mark, but sadly we have to go
        // by what we know statically. However, usually query param groups tend to contain
        // ampersands and equal signs, which we check for later heuristically.)
        return UriPart.AUTHORITY_OR_PATH;
      }
      if ((matchChar == '=' || matchChar == '&') && uriPart == UriPart.MAYBE_VARIABLE_SCHEME) {
        // This case is really special, and is only seen in cases like href="{$x}&foo={$y}" or
        // href="{$x}foo={$y}". While in this case we can never be sure that we're in the query
        // part, we do know two things:
        //
        // 1) We can't possibly set a dangerous scheme, since no valid scheme contains = or &
        // 2) Within QUERY, all print statements are encoded as a URI component, which limits
        // the damage that can be done; it can't even break into another path segment.
        // Therefore, it is secure to assume this.
        //
        // Note we can safely handle ampersand even in HTML contexts because attribute values
        // are processed unescaped.
        return UriPart.QUERY;
      }
      // fall through
    case AUTHORITY_OR_PATH:
      // fall through
    case UNKNOWN_PRE_FRAGMENT:
      if (matchChar == '?') {
        // Upon a ? we can be pretty sure we're in the query. While it's possible for something
        // like {$base}?foo=bar to be in the fragment if $base contains a #, it's safe to assume
        // we're in the query, because query params are escaped more strictly than the fragment.
        return UriPart.QUERY;
      }
      // fall through
    case QUERY:
    case UNKNOWN:
      if (matchChar == '#') {
        // A # anywhere proves we're in the fragment, even if we're already in the fragment.
        return UriPart.FRAGMENT;
      }
      // fall through
    case FRAGMENT:
      // No transitions for fragment.
      return uriPart;
    case DANGEROUS_SCHEME:
      // Dangerous schemes remain dangerous.
      return UriPart.DANGEROUS_SCHEME;
    case TRUSTED_RESOURCE_URI_END:
      throw new AssertionError("impossible");
    case NONE:
      // generally impossible
    case START:
      // fall-through. this should have been handled by our callers
  }
  throw new AssertionError("Unanticipated URI part: " + uriPart);
}
/**
 * Transition between different parts of an http-like URL.
 *
 * <p>This happens on the first important URI character, or upon seeing the end of the raw text
 * segment and not seeing anything else.
 */
private static final Transition URI_PART_TRANSITION =
    new Transition(Pattern.compile("([:./&?=#])|\\z")) {
      @Override
      boolean isApplicableTo(Context prior, Matcher matcher) {
        // Trusted resource URIs follow stricter rules handled by
        // TrustedResourceUriPartTransition.
        return prior.uriType() != UriType.TRUSTED_RESOURCE;
      }

      @Override
      Context computeNextContext(RawTextNode node, int offset, Context prior, Matcher matcher) {
        UriPart uriPart = prior.uriPart();
        if (uriPart == UriPart.START) {
          uriPart = UriPart.MAYBE_SCHEME;
        }
        // group(1) is null when the end-of-input alternative (\z) matched.
        String match = matcher.group(1);
        if (match != null) {
          checkState(match.length() == 1);
          uriPart = getNextUriPart(node, offset, uriPart, match.charAt(0));
        }
        return prior.derive(uriPart);
      }
    };
/** Transition to detect dangerous URI schemes. */
private static final Transition URI_START_TRANSITION =
    new Transition(Pattern.compile("(?i)^(javascript|data|blob|filesystem):")) {
      @Override
      boolean isApplicableTo(Context prior, Matcher matcher) {
        // Only meaningful at the very start of a non-trusted-resource URI.
        return prior.uriPart() == UriPart.START && prior.uriType() != UriType.TRUSTED_RESOURCE;
      }

      @Override
      Context computeNextContext(Context prior, Matcher matcher) {
        // TODO(gboyer): Ban all but whitelisted schemes.
        return prior.derive(UriPart.DANGEROUS_SCHEME);
      }
    };
/**
 * Transition between different parts of a trusted resource uri http-like URL.
 *
 * <p>We don't use the normal URI derivation algorithm because for trusted_resource_uris we have
 * stricter rules.
 *
 * <ul>
 *   <li>If a scheme is present, it must be {@code https}
 *   <li>We don't allow partial scheme or hosts
 *   <li>from URI START we must end up in AUTHORITY_OR_PATH, though in our case it really just
 *       means PATH
 * </ul>
 */
private static class TrustedResourceUriPartTransition extends Transition {
  // Allowed fixed prefixes for a trusted resource URL; every alternative ends in path, query,
  // or fragment context so neither scheme nor host can be attacker controlled.
  private static final Pattern BASE_URL_PATTERN =
      Pattern.compile(
          "^((https:)?//[0-9a-z.:\\[\\]-]+/" // Origin.
              + "|/[^/\\\\]" // Absolute path.
              + "|[^:/\\\\]+/" // Relative path.
              + "|[^:/\\\\]*[?#]" // Query string or fragment.
              + "|about:blank#" // about:blank with fragment.
              + ")",
          Pattern.CASE_INSENSITIVE);

  TrustedResourceUriPartTransition(Pattern pattern) {
    super(pattern);
  }

  /** Matches the whole string. */
  TrustedResourceUriPartTransition() {
    super();
  }

  @Override
  boolean isApplicableTo(Context prior, @Nullable Matcher matcher) {
    // Complements URI_PART_TRANSITION, which handles every other URI type.
    return prior.uriType() == UriType.TRUSTED_RESOURCE;
  }

  @Override
  Context computeNextContext(
      RawTextNode node, int offset, Context context, @Nullable Matcher matcher) {
    // A null matcher means the end-of-input transition fired: examine the rest of the node.
    String match = matcher == null ? node.getRawText().substring(offset) : matcher.group();
    switch (context.uriPart()) {
      case START:
        // Most of the work is here. We expect the match to be one of the following forms:
        // - https://foo/ NOTYPO
        // - //foo/
        // - Absolute or relative path.
        // This emulates the behavior of the safevalues.trustedResourceUrl tag template function
        // NOTE: In all cases we require that the fixed portion of the URL ends in path context.
        // This is important to make sure that neither scheme nor host are potentially attacker
        // controlled.
        // Additionally, we will escapeUri all dynamic parts of the URL after this point which
        // allows some things like query parameters to be set using untrusted content.
        if (!BASE_URL_PATTERN.matcher(match).find()) {
          // If the prefix is not allowed then we switch to UriPart.END meaning that we don't
          // allow anything after this node (the whole URI must be fixed).
          context = context.derive(UriPart.TRUSTED_RESOURCE_URI_END);
          break;
        } else {
          context = context.derive(UriPart.AUTHORITY_OR_PATH);
        }
        // and fall-through
      case AUTHORITY_OR_PATH:
        int queryIndex = match.indexOf('?');
        if (queryIndex == -1) {
          // this might occur if a dynamic node ended with some query parameters
          queryIndex = match.indexOf('&');
        }
        if (queryIndex != -1) {
          context = context.derive(UriPart.QUERY);
        }
        // fall-through
      case QUERY:
        if (match.indexOf('#') == -1) {
          // still in the query
          return context;
        }
        context = context.derive(UriPart.FRAGMENT);
        break;
      case FRAGMENT:
        // fragment is the end
        return context;
      case TRUSTED_RESOURCE_URI_END:
        return context;
      case DANGEROUS_SCHEME:
      case MAYBE_SCHEME:
      case MAYBE_VARIABLE_SCHEME:
      case UNKNOWN:
      case UNKNOWN_PRE_FRAGMENT:
        throw SoyAutoescapeException.createWithNode(
            "Cannot safely process this TrustedResourceUri at compile time. "
                + "TrustedResourceUris must have a statically identifiable scheme and host. "
                + "Either use a hard-coded scheme, or move the calculation of this URL outside "
                + "of the template and use an ordaining API.",
            node.substring(/* newId= */ Integer.MAX_VALUE, offset));
      case NONE:
        throw new AssertionError("impossible");
    }
    return context;
  }
}
/**
 * Matches the beginning of a CSS URI with the delimiter, if any, in group 1, and transitions into
 * the matching CSS URI state with the given URI type.
 */
private static Transition makeCssUriTransition(Pattern regex, UriType uriType) {
  return new Transition(regex) {
    @Override
    Context computeNextContext(Context prior, Matcher matcher) {
      // Pick the CSS URI state matching the quoting style of the delimiter (may be absent).
      String delimiter = matcher.group(1);
      final HtmlContext state;
      if ("'".equals(delimiter)) {
        state = HtmlContext.CSS_SQ_URI;
      } else if ("\"".equals(delimiter)) {
        state = HtmlContext.CSS_DQ_URI;
      } else {
        state = HtmlContext.CSS_URI;
      }
      return prior
          .toBuilder()
          .withState(state)
          .withUriType(uriType)
          .withUriPart(UriPart.START)
          .build();
    }
  };
}
private static final class JsLexerProcessor implements Processor {
static final JsLexerProcessor INSTANCE = new JsLexerProcessor();
@Override
public Result processText(Context context, RawTextNode node, int offset, String text) {
try {
Context next = JsLexerTokenManager.calculateTransitions(context, text, offset);
return Result.create(next, text.length() - offset);
} catch (LexerError le) {
throw SoyAutoescapeException.createWithNode(
le.getReason(),
node.substring(/* newId= */ Integer.MAX_VALUE, offset + le.getOffset()));
}
}
}
  /**
   * For each state, a group of rules for consuming raw text and how that affects the document
   * context. The rules each have an associated pattern, and the rule whose pattern matches earliest
   * in the text wins.
   */
  private static final ImmutableMap<HtmlContext, Processor> TRANSITIONS =
      ImmutableMap.<HtmlContext, Processor>builder()
          // All edges in or out of pcdata, comment or attr value are triggered by nodes and thus
          // handled by the InferenceEngine.
          .put(HtmlContext.HTML_PCDATA, TransitionSetProcessor.of(TRANSITION_TO_SELF))
          .put(HtmlContext.HTML_COMMENT, TransitionSetProcessor.of(TRANSITION_TO_SELF))
          .put(HtmlContext.HTML_NORMAL_ATTR_VALUE, TransitionSetProcessor.of(TRANSITION_TO_SELF))
          // In a <meta http-equiv=refresh content="..."> value: after a ',' or ';' delimiter
          // (optionally followed by "URL=" and an opening quote) the remainder is a REFRESH URI.
          .put(
              HtmlContext.HTML_META_REFRESH_CONTENT,
              TransitionSetProcessor.of(
                  new Transition(
                      Pattern.compile("[,;] *(URL *=? *)?['\"]?", Pattern.CASE_INSENSITIVE)) {
                    @Override
                    Context computeNextContext(Context prior, Matcher matcher) {
                      return prior.toBuilder()
                          .withState(HtmlContext.URI)
                          .withUriType(UriType.REFRESH)
                          .withUriPart(UriPart.START)
                          .build();
                    }
                  },
                  TRANSITION_TO_SELF))
          // Any raw text inside an HTML-valued attribute means we are no longer at its start.
          .put(
              HtmlContext.HTML_HTML_ATTR_VALUE,
              TransitionSetProcessor.of(
                  new Transition() {
                    @Override
                    Context computeNextContext(Context prior, Matcher matcher) {
                      return prior.derive(HtmlHtmlAttributePosition.NOT_START);
                    }
                  },
                  TRANSITION_TO_SELF))
          // The CSS transitions below are based on http://www.w3.org/TR/css3-syntax/#lexical
          .put(
              HtmlContext.CSS,
              TransitionSetProcessor.of(
                  makeTransitionToStateLiteral("/*", HtmlContext.CSS_COMMENT),
                  // TODO: Do we need to support non-standard but widely supported C++ style
                  // comments?
                  // gsearch -i -c -f '\.soy$' '<style[^>]*>[^<]*?\/\/' shows only 3 uses.
                  makeTransitionToStateLiteral("\"", HtmlContext.CSS_DQ_STRING),
                  makeTransitionToStateLiteral("'", HtmlContext.CSS_SQ_STRING),
                  // Although we don't contextually parse CSS, certain property names are only used
                  // in conjunction with images. This pretty basic regexp does a decent job on CSS
                  // that is not attempting to be malicious (for example, doesn't handle comments).
                  // Note that this can be fooled with {if 1}foo-{/if}background, but it's not worth
                  // really worrying about.
                  makeCssUriTransition(
                      Pattern.compile(
                          "(?i)(?:[^a-z0-9-]|^)\\s*"
                              + "(?:background|background-image|border-image|content"
                              + "|cursor|list-style|list-style-image)"
                              + "\\s*:\\s*url\\s*\\(\\s*(['\"]?)"),
                      UriType.MEDIA),
                  // @import pulls in executable style, so its target must be a trusted resource.
                  makeCssUriTransition(
                      Pattern.compile("@import\\b(?:\\s+url\\s*\\()?\\s*(['\"]?)"),
                      UriType.TRUSTED_RESOURCE),
                  makeCssUriTransition(
                      Pattern.compile("(?i)\\burl\\s*\\(\\s*(['\"]?)"), UriType.NORMAL),
                  TRANSITION_TO_SELF))
          .put(
              HtmlContext.CSS_COMMENT,
              TransitionSetProcessor.of(
                  makeTransitionToStateLiteral("*/", HtmlContext.CSS), TRANSITION_TO_SELF))
          .put(
              HtmlContext.CSS_DQ_STRING,
              TransitionSetProcessor.of(
                  makeTransitionToStateLiteral("\"", HtmlContext.CSS),
                  makeTransitionToSelf(
                      Pattern.compile("\\\\(?:\r\n?|[\n\f\"])")), // Line continuation or escape.
                  makeTransitionToError(
                      Pattern.compile("[\n\r\f]"), "Newlines not permitted in string literals."),
                  TRANSITION_TO_SELF))
          .put(
              HtmlContext.CSS_SQ_STRING,
              TransitionSetProcessor.of(
                  makeTransitionToStateLiteral("'", HtmlContext.CSS),
                  makeTransitionToSelf(
                      Pattern.compile("\\\\(?:\r\n?|[\n\f'])")), // Line continuation or escape.
                  makeTransitionToError(
                      Pattern.compile("[\n\r\f]"), "Newlines not permitted in string literals."),
                  TRANSITION_TO_SELF))
          .put(
              HtmlContext.CSS_URI,
              TransitionSetProcessor.of(
                  makeTransitionToState(Pattern.compile("[\\)\\s]"), HtmlContext.CSS),
                  URI_PART_TRANSITION,
                  URI_START_TRANSITION,
                  new TrustedResourceUriPartTransition(Pattern.compile("[^);\n\r\f]+")),
                  makeTransitionToError(
                      Pattern.compile("[\"']"), "Quotes not permitted in CSS URIs.")))
          .put(
              HtmlContext.CSS_SQ_URI,
              TransitionSetProcessor.of(
                  makeTransitionToStateLiteral("'", HtmlContext.CSS),
                  URI_PART_TRANSITION,
                  URI_START_TRANSITION,
                  new TrustedResourceUriPartTransition(Pattern.compile("[^'\n\r\f]+")),
                  makeTransitionToSelf(
                      Pattern.compile("\\\\(?:\r\n?|[\n\f'])")), // Line continuation or escape.
                  makeTransitionToError(
                      Pattern.compile("[\n\r\f]"), "Newlines not permitted in string literal.")))
          .put(
              HtmlContext.CSS_DQ_URI,
              TransitionSetProcessor.of(
                  makeTransitionToStateLiteral("\"", HtmlContext.CSS),
                  URI_PART_TRANSITION,
                  URI_START_TRANSITION,
                  new TrustedResourceUriPartTransition(Pattern.compile("[^\n\r\f\"]+")),
                  makeTransitionToSelf(
                      Pattern.compile("\\\\(?:\r\n?|[\n\f\"])")), // Line continuation or escape.
                  makeTransitionToError(
                      Pattern.compile("[\n\r\f]"), "Newlines not permitted in string literal.")))
          // All JS contexts are handled by the dedicated JS lexer rather than regex transitions.
          .put(HtmlContext.JS, JsLexerProcessor.INSTANCE)
          .put(HtmlContext.JS_BLOCK_COMMENT, JsLexerProcessor.INSTANCE)
          .put(HtmlContext.JS_LINE_COMMENT, JsLexerProcessor.INSTANCE)
          .put(HtmlContext.JS_DQ_STRING, JsLexerProcessor.INSTANCE)
          .put(HtmlContext.JS_SQ_STRING, JsLexerProcessor.INSTANCE)
          .put(HtmlContext.JS_TEMPLATE_LITERAL, JsLexerProcessor.INSTANCE)
          .put(HtmlContext.JS_REGEX, JsLexerProcessor.INSTANCE)
          .put(
              HtmlContext.URI,
              TransitionSetProcessor.of(
                  URI_PART_TRANSITION,
                  URI_START_TRANSITION,
                  new TrustedResourceUriPartTransition()))
          // All edges out of rcdata are triggered by tags which are handled in the InferenceEngine
          .put(HtmlContext.HTML_RCDATA, TransitionSetProcessor.of(TRANSITION_TO_SELF))
          .put(HtmlContext.HTML_SCRIPT_PHRASING_DATA, TransitionSetProcessor.of(TRANSITION_TO_SELF))
          // Text context has no edges except to itself.
          .put(HtmlContext.TEXT, TransitionSetProcessor.of(TRANSITION_TO_SELF))
          .build();
// TODO: If we need to deal with untrusted templates, then we need to make sure that tokens like
// <!--, </script>, etc. are never split with empty strings.
// We could do this by walking all possible paths through each template (both branches for ifs,
// each case for switches, and the 0,1, and 2+ iteration case for loops).
// For each template, tokenize the original's rawText nodes using RawTextContextUpdater and then
// tokenize one single rawText node made by concatenating all rawText.
// If one contains a sensitive token, e.g. <!--/ and the other doesn't, then we have a potential
// splitting attack.
// That and disallow unquoted attributes, and be paranoid about prints especially in the TAG_NAME
// productions.
}
|
openjdk/jdk8 | 35,150 | jdk/src/share/classes/javax/sql/rowset/spi/SyncFactory.java | /*
* Copyright (c) 2003, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package javax.sql.rowset.spi;
import java.util.logging.*;
import java.util.*;
import java.sql.*;
import javax.sql.*;
import java.io.FileInputStream;
import java.io.InputStream;
import java.io.IOException;
import java.io.FileNotFoundException;
import java.security.AccessController;
import java.security.PrivilegedAction;
import javax.naming.*;
/**
* The Service Provider Interface (SPI) mechanism that generates <code>SyncProvider</code>
* instances to be used by disconnected <code>RowSet</code> objects.
* The <code>SyncProvider</code> instances in turn provide the
* <code>javax.sql.RowSetReader</code> object the <code>RowSet</code> object
* needs to populate itself with data and the
* <code>javax.sql.RowSetWriter</code> object it needs to
* propagate changes to its
* data back to the underlying data source.
* <P>
* Because the methods in the <code>SyncFactory</code> class are all static,
* there is only one <code>SyncFactory</code> object
* per Java VM at any one time. This ensures that there is a single source from which a
* <code>RowSet</code> implementation can obtain its <code>SyncProvider</code>
* implementation.
*
* <h3>1.0 Overview</h3>
* The <code>SyncFactory</code> class provides an internal registry of available
* synchronization provider implementations (<code>SyncProvider</code> objects).
* This registry may be queried to determine which
* synchronization providers are available.
* The following line of code gets an enumeration of the providers currently registered.
* <PRE>
* java.util.Enumeration e = SyncFactory.getRegisteredProviders();
* </PRE>
* All standard <code>RowSet</code> implementations must provide at least two providers:
* <UL>
* <LI>an optimistic provider for use with a <code>CachedRowSet</code> implementation
* or an implementation derived from it
* <LI>an XML provider, which is used for reading and writing XML, such as with
* <code>WebRowSet</code> objects
* </UL>
* Note that the JDBC RowSet Implementations include the <code>SyncProvider</code>
* implementations <code>RIOptimisticProvider</code> and <code>RIXmlProvider</code>,
* which satisfy this requirement.
* <P>
* The <code>SyncFactory</code> class provides accessor methods to assist
* applications in determining which synchronization providers are currently
* registered with the <code>SyncFactory</code>.
* <p>
* Other methods let <code>RowSet</code> persistence providers be
* registered or de-registered with the factory mechanism. This
* allows additional synchronization provider implementations to be made
* available to <code>RowSet</code> objects at run time.
* <p>
* Applications can apply a degree of filtering to determine the level of
* synchronization that a <code>SyncProvider</code> implementation offers.
* The following criteria determine whether a provider is
* made available to a <code>RowSet</code> object:
* <ol>
* <li>If a particular provider is specified by a <code>RowSet</code> object, and
* the <code>SyncFactory</code> does not contain a reference to this provider,
* a <code>SyncFactoryException</code> is thrown stating that the synchronization
* provider could not be found.
*
* <li>If a <code>RowSet</code> implementation is instantiated with a specified
* provider and the specified provider has been properly registered, the
* requested provider is supplied. Otherwise a <code>SyncFactoryException</code>
* is thrown.
*
* <li>If a <code>RowSet</code> object does not specify a
* <code>SyncProvider</code> implementation and no additional
* <code>SyncProvider</code> implementations are available, the reference
* implementation providers are supplied.
* </ol>
* <h3>2.0 Registering <code>SyncProvider</code> Implementations</h3>
* <p>
* Both vendors and developers can register <code>SyncProvider</code>
* implementations using one of the following mechanisms.
* <ul>
* <LI><B>Using the command line</B><BR>
* The name of the provider is supplied on the command line, which will add
* the provider to the system properties.
* For example:
* <PRE>
* -Drowset.provider.classname=com.fred.providers.HighAvailabilityProvider
* </PRE>
* <li><b>Using the Standard Properties File</b><BR>
* The reference implementation is targeted
* to ship with J2SE 1.5, which will include an additional resource file
* that may be edited by hand. Here is an example of the properties file
* included in the reference implementation:
* <PRE>
* #Default JDBC RowSet sync providers listing
* #
*
* # Optimistic synchronization provider
* rowset.provider.classname.0=com.sun.rowset.providers.RIOptimisticProvider
* rowset.provider.vendor.0=Oracle Corporation
* rowset.provider.version.0=1.0
*
* # XML Provider using standard XML schema
* rowset.provider.classname.1=com.sun.rowset.providers.RIXMLProvider
* rowset.provider.vendor.1=Oracle Corporation
* rowset.provider.version.1=1.0
* </PRE>
* The <code>SyncFactory</code> checks this file and registers the
* <code>SyncProvider</code> implementations that it contains. A
* developer or vendor can add other implementations to this file.
* For example, here is a possible addition:
* <PRE>
* rowset.provider.classname.2=com.fred.providers.HighAvailabilityProvider
* rowset.provider.vendor.2=Fred, Inc.
* rowset.provider.version.2=1.0
* </PRE>
*
* <li><b>Using a JNDI Context</b><BR>
* Available providers can be registered on a JNDI
* context, and the <code>SyncFactory</code> will attempt to load
* <code>SyncProvider</code> implementations from that JNDI context.
* For example, the following code fragment registers a provider implementation
* on a JNDI context. This is something a deployer would normally do. In this
* example, <code>MyProvider</code> is being registered on a CosNaming
* namespace, which is the namespace used by J2EE resources.
* <PRE>
* import javax.naming.*;
*
 * Hashtable svrEnv = new Hashtable();
 * svrEnv.put(Context.INITIAL_CONTEXT_FACTORY, "CosNaming");
*
* Context ctx = new InitialContext(svrEnv);
 * com.fred.providers.MyProvider syncProvider = new MyProvider();
* ctx.rebind("providers/MyProvider", syncProvider);
* </PRE>
* </ul>
* Next, an application will register the JNDI context with the
* <code>SyncFactory</code> instance. This allows the <code>SyncFactory</code>
* to browse within the JNDI context looking for <code>SyncProvider</code>
* implementations.
* <PRE>
* Hashtable appEnv = new Hashtable();
* appEnv.put(Context.INITIAL_CONTEXT_FACTORY, "CosNaming");
* appEnv.put(Context.PROVIDER_URL, "iiop://hostname/providers");
* Context ctx = new InitialContext(appEnv);
*
* SyncFactory.registerJNDIContext(ctx);
* </PRE>
* If a <code>RowSet</code> object attempts to obtain a <code>MyProvider</code>
* object, the <code>SyncFactory</code> will try to locate it. First it searches
* for it in the system properties, then it looks in the resource files, and
* finally it checks the JNDI context that has been set. The <code>SyncFactory</code>
* instance verifies that the requested provider is a valid extension of the
* <code>SyncProvider</code> abstract class and then gives it to the
* <code>RowSet</code> object. In the following code fragment, a new
* <code>CachedRowSet</code> object is created and initialized with
* <i>env</i>, which contains the binding to <code>MyProvider</code>.
* <PRE>
* Hashtable env = new Hashtable();
* env.put(SyncFactory.ROWSET_SYNC_PROVIDER, "com.fred.providers.MyProvider");
* CachedRowSet crs = new com.sun.rowset.CachedRowSetImpl(env);
* </PRE>
* Further details on these mechanisms are available in the
* <code>javax.sql.rowset.spi</code> package specification.
*
* @author Jonathan Bruce
* @see javax.sql.rowset.spi.SyncProvider
* @see javax.sql.rowset.spi.SyncFactoryException
*/
public class SyncFactory {
    /**
     * Creates a new <code>SyncFactory</code> object, which is the singleton
     * instance.
     * Having a private constructor guarantees that no more than
     * one <code>SyncFactory</code> object can exist at a time.
     */
    private SyncFactory() {
    }
    /**
     * The standard property-id for a synchronization provider implementation
     * name.
     */
    public static final String ROWSET_SYNC_PROVIDER =
            "rowset.provider.classname";
    /**
     * The standard property-id for a synchronization provider implementation
     * vendor name.
     */
    public static final String ROWSET_SYNC_VENDOR =
            "rowset.provider.vendor";
    /**
     * The standard property-id for a synchronization provider implementation
     * version tag.
     */
    public static final String ROWSET_SYNC_PROVIDER_VERSION =
            "rowset.provider.version";
    /**
     * The standard resource file name.
     * Deliberately non-final: initMapIfNecessary() replaces it with the
     * -Drowset.properties value (if set) and later with the classpath
     * location of the bundled rowset.properties resource.
     */
    private static String ROWSET_PROPERTIES = "rowset.properties";
    /**
     * Permission required to invoke setJNDIContext and setLogger
     */
    private static final SQLPermission SET_SYNCFACTORY_PERMISSION =
            new SQLPermission("setSyncFactory");
    /**
     * The initial JNDI context where <code>SyncProvider</code> implementations can
     * be stored and from which they can be invoked.
     */
    private static Context ic;
    /**
     * The <code>Logger</code> object to be used by the <code>SyncFactory</code>.
     * Volatile so that a logger installed via setLogger (an unsynchronized
     * method) is safely published to readers in getLogger.
     */
    private static volatile Logger rsLogger;
    /**
     * The registry of available <code>SyncProvider</code> implementations.
     * See section 2.0 of the class comment for <code>SyncFactory</code> for an
     * explanation of how a provider can be added to this registry.
     * Lazily created by initMapIfNecessary(); mutating accessors are
     * class-level synchronized.
     */
    private static Hashtable<String, SyncProvider> implementations;
/**
* Adds the the given synchronization provider to the factory register. Guidelines
* are provided in the <code>SyncProvider</code> specification for the
* required naming conventions for <code>SyncProvider</code>
* implementations.
* <p>
* Synchronization providers bound to a JNDI context can be
* registered by binding a SyncProvider instance to a JNDI namespace.
*
* <pre>
* {@code
* SyncProvider p = new MySyncProvider();
* InitialContext ic = new InitialContext();
* ic.bind ("jdbc/rowset/MySyncProvider", p);
* } </pre>
*
* Furthermore, an initial JNDI context should be set with the
* <code>SyncFactory</code> using the <code>setJNDIContext</code> method.
* The <code>SyncFactory</code> leverages this context to search for
* available <code>SyncProvider</code> objects bound to the JNDI
* context and its child nodes.
*
* @param providerID A <code>String</code> object with the unique ID of the
* synchronization provider being registered
* @throws SyncFactoryException if an attempt is made to supply an empty
* or null provider name
* @see #setJNDIContext
*/
public static synchronized void registerProvider(String providerID)
throws SyncFactoryException {
ProviderImpl impl = new ProviderImpl();
impl.setClassname(providerID);
initMapIfNecessary();
implementations.put(providerID, impl);
}
/**
* Returns the <code>SyncFactory</code> singleton.
*
* @return the <code>SyncFactory</code> instance
*/
public static SyncFactory getSyncFactory() {
/*
* Using Initialization on Demand Holder idiom as
* Effective Java 2nd Edition,ITEM 71, indicates it is more performant
* than the Double-Check Locking idiom.
*/
return SyncFactoryHolder.factory;
}
/**
* Removes the designated currently registered synchronization provider from the
* Factory SPI register.
*
* @param providerID The unique-id of the synchronization provider
* @throws SyncFactoryException If an attempt is made to
* unregister a SyncProvider implementation that was not registered.
*/
public static synchronized void unregisterProvider(String providerID)
throws SyncFactoryException {
initMapIfNecessary();
if (implementations.containsKey(providerID)) {
implementations.remove(providerID);
}
}
    // Separator used when several provider classnames are supplied in one
    // -Drowset.provider.classname value.
    private static String colon = ":";
    // Resource-path separator used to build the classpath location of the
    // bundled rowset.properties file.
    private static String strFileSep = "/";
    /**
     * Lazily populates the provider registry exactly once, from (in order):
     * an optional user properties file named by -Drowset.properties, the
     * rowset.properties resource bundled on the classpath, and finally any
     * -Drowset.provider.classname command-line value. Later sources add to
     * (and identical keys overwrite) earlier ones.
     *
     * @throws SyncFactoryException if a properties source cannot be located or read
     */
    private static synchronized void initMapIfNecessary() throws SyncFactoryException {
        // Local implementation class names and keys from Properties
        // file, translate names into Class objects using Class.forName
        // and store mappings
        Properties properties = new Properties();
        if (implementations == null) {
            implementations = new Hashtable<>();
            try {
                // check if user is supplying his Synchronisation Provider
                // Implementation if not using Oracle's implementation.
                // properties.load(new FileInputStream(ROWSET_PROPERTIES));
                // The rowset.properties needs to be in jdk/jre/lib when
                // integrated with jdk.
                // else it should be picked from -D option from command line.
                // -Drowset.properties will add to standard properties. Similar
                // keys will over-write
                /*
                 * Dependent on application
                 */
                String strRowsetProperties;
                try {
                    // Read the system property under a limited privilege scope;
                    // fall back to null if it cannot be read (e.g. SecurityException).
                    strRowsetProperties = AccessController.doPrivileged(new PrivilegedAction<String>() {
                        public String run() {
                            return System.getProperty("rowset.properties");
                        }
                    }, null, new PropertyPermission("rowset.properties","read"));
                } catch (Exception ex) {
                    strRowsetProperties = null;
                }
                if (strRowsetProperties != null) {
                    // Load user's implementation of SyncProvider
                    // here. -Drowset.properties=/abc/def/pqr.txt
                    ROWSET_PROPERTIES = strRowsetProperties;
                    try (FileInputStream fis = new FileInputStream(ROWSET_PROPERTIES)) {
                        properties.load(fis);
                    }
                    parseProperties(properties);
                }
                /*
                 * Always available
                 */
                ROWSET_PROPERTIES = "javax" + strFileSep + "sql" +
                        strFileSep + "rowset" + strFileSep +
                        "rowset.properties";
                ClassLoader cl = Thread.currentThread().getContextClassLoader();
                // Resolve the bundled resource via the context class loader when
                // present, otherwise via the system class loader.
                try (InputStream stream =
                        (cl == null) ? ClassLoader.getSystemResourceAsStream(ROWSET_PROPERTIES)
                                : cl.getResourceAsStream(ROWSET_PROPERTIES)) {
                    if (stream == null) {
                        throw new SyncFactoryException(
                                "Resource " + ROWSET_PROPERTIES + " not found");
                    }
                    properties.load(stream);
                }
                parseProperties(properties);
                // removed else, has properties should sum together
            } catch (FileNotFoundException e) {
                throw new SyncFactoryException("Cannot locate properties file: " + e);
            } catch (IOException e) {
                throw new SyncFactoryException("IOException: " + e);
            }
            /*
             * Now deal with -Drowset.provider.classname
             * load additional properties from -D command line
             */
            properties.clear();
            String providerImpls;
            try {
                providerImpls = AccessController.doPrivileged(new PrivilegedAction<String>() {
                    public String run() {
                        return System.getProperty(ROWSET_SYNC_PROVIDER);
                    }
                }, null, new PropertyPermission(ROWSET_SYNC_PROVIDER,"read"));
            } catch (Exception ex) {
                providerImpls = null;
            }
            if (providerImpls != null) {
                int i = 0;
                // Multiple providers may be supplied as a colon-separated list;
                // each is registered under an indexed classname property.
                if (providerImpls.indexOf(colon) > 0) {
                    StringTokenizer tokenizer = new StringTokenizer(providerImpls, colon);
                    while (tokenizer.hasMoreElements()) {
                        properties.put(ROWSET_SYNC_PROVIDER + "." + i, tokenizer.nextToken());
                        i++;
                    }
                } else {
                    properties.put(ROWSET_SYNC_PROVIDER, providerImpls);
                }
                parseProperties(properties);
            }
        }
    }
/**
* The internal debug switch.
*/
private static boolean debug = false;
/**
* Internal registry count for the number of providers contained in the
* registry.
*/
private static int providerImplIndex = 0;
/**
* Internal handler for all standard property parsing. Parses standard
* ROWSET properties and stores lazy references into the the internal registry.
*/
private static void parseProperties(Properties p) {
ProviderImpl impl = null;
String key = null;
String[] propertyNames = null;
for (Enumeration<?> e = p.propertyNames(); e.hasMoreElements();) {
String str = (String) e.nextElement();
int w = str.length();
if (str.startsWith(SyncFactory.ROWSET_SYNC_PROVIDER)) {
impl = new ProviderImpl();
impl.setIndex(providerImplIndex++);
if (w == (SyncFactory.ROWSET_SYNC_PROVIDER).length()) {
// no property index has been set.
propertyNames = getPropertyNames(false);
} else {
// property index has been set.
propertyNames = getPropertyNames(true, str.substring(w - 1));
}
key = p.getProperty(propertyNames[0]);
impl.setClassname(key);
impl.setVendor(p.getProperty(propertyNames[1]));
impl.setVersion(p.getProperty(propertyNames[2]));
implementations.put(key, impl);
}
}
}
/**
* Used by the parseProperties methods to disassemble each property tuple.
*/
private static String[] getPropertyNames(boolean append) {
return getPropertyNames(append, null);
}
/**
* Disassembles each property and its associated value. Also handles
* overloaded property names that contain indexes.
*/
private static String[] getPropertyNames(boolean append,
String propertyIndex) {
String dot = ".";
String[] propertyNames =
new String[]{SyncFactory.ROWSET_SYNC_PROVIDER,
SyncFactory.ROWSET_SYNC_VENDOR,
SyncFactory.ROWSET_SYNC_PROVIDER_VERSION};
if (append) {
for (int i = 0; i < propertyNames.length; i++) {
propertyNames[i] = propertyNames[i] +
dot +
propertyIndex;
}
return propertyNames;
} else {
return propertyNames;
}
}
/**
* Internal debug method that outputs the registry contents.
*/
private static void showImpl(ProviderImpl impl) {
System.out.println("Provider implementation:");
System.out.println("Classname: " + impl.getClassname());
System.out.println("Vendor: " + impl.getVendor());
System.out.println("Version: " + impl.getVersion());
System.out.println("Impl index: " + impl.getIndex());
}
/**
* Returns the <code>SyncProvider</code> instance identified by <i>providerID</i>.
*
* @param providerID the unique identifier of the provider
* @return a <code>SyncProvider</code> implementation
* @throws SyncFactoryException If the SyncProvider cannot be found,
* the providerID is {@code null}, or
* some error was encountered when trying to invoke this provider.
*/
public static SyncProvider getInstance(String providerID)
throws SyncFactoryException {
if(providerID == null) {
throw new SyncFactoryException("The providerID cannot be null");
}
initMapIfNecessary(); // populate HashTable
initJNDIContext(); // check JNDI context for any additional bindings
ProviderImpl impl = (ProviderImpl) implementations.get(providerID);
if (impl == null) {
// Requested SyncProvider is unavailable. Return default provider.
return new com.sun.rowset.providers.RIOptimisticProvider();
}
// Attempt to invoke classname from registered SyncProvider list
Class<?> c = null;
try {
ClassLoader cl = Thread.currentThread().getContextClassLoader();
/**
* The SyncProvider implementation of the user will be in
* the classpath. We need to find the ClassLoader which loads
* this SyncFactory and try to laod the SyncProvider class from
* there.
**/
c = Class.forName(providerID, true, cl);
if (c != null) {
return (SyncProvider) c.newInstance();
} else {
return new com.sun.rowset.providers.RIOptimisticProvider();
}
} catch (IllegalAccessException e) {
throw new SyncFactoryException("IllegalAccessException: " + e.getMessage());
} catch (InstantiationException e) {
throw new SyncFactoryException("InstantiationException: " + e.getMessage());
} catch (ClassNotFoundException e) {
throw new SyncFactoryException("ClassNotFoundException: " + e.getMessage());
}
}
/**
* Returns an Enumeration of currently registered synchronization
* providers. A <code>RowSet</code> implementation may use any provider in
* the enumeration as its <code>SyncProvider</code> object.
* <p>
* At a minimum, the reference synchronization provider allowing
* RowSet content data to be stored using a JDBC driver should be
* possible.
*
* @return Enumeration A enumeration of available synchronization
* providers that are registered with this Factory
* @throws SyncFactoryException If an error occurs obtaining the registered
* providers
*/
public static Enumeration<SyncProvider> getRegisteredProviders()
throws SyncFactoryException {
initMapIfNecessary();
// return a collection of classnames
// of type SyncProvider
return implementations.elements();
}
/**
* Sets the logging object to be used by the <code>SyncProvider</code>
* implementation provided by the <code>SyncFactory</code>. All
* <code>SyncProvider</code> implementations can log their events to
* this object and the application can retrieve a handle to this
* object using the <code>getLogger</code> method.
* <p>
* This method checks to see that there is an {@code SQLPermission}
* object which grants the permission {@code setSyncFactory}
* before allowing the method to succeed. If a
* {@code SecurityManager} exists and its
* {@code checkPermission} method denies calling {@code setLogger},
* this method throws a
* {@code java.lang.SecurityException}.
*
* @param logger A Logger object instance
* @throws java.lang.SecurityException if a security manager exists and its
* {@code checkPermission} method denies calling {@code setLogger}
* @throws NullPointerException if the logger is null
* @see SecurityManager#checkPermission
*/
public static void setLogger(Logger logger) {
SecurityManager sec = System.getSecurityManager();
if (sec != null) {
sec.checkPermission(SET_SYNCFACTORY_PERMISSION);
}
if(logger == null){
throw new NullPointerException("You must provide a Logger");
}
rsLogger = logger;
}
/**
* Sets the logging object that is used by <code>SyncProvider</code>
* implementations provided by the <code>SyncFactory</code> SPI. All
* <code>SyncProvider</code> implementations can log their events
* to this object and the application can retrieve a handle to this
* object using the <code>getLogger</code> method.
* <p>
* This method checks to see that there is an {@code SQLPermission}
* object which grants the permission {@code setSyncFactory}
* before allowing the method to succeed. If a
* {@code SecurityManager} exists and its
* {@code checkPermission} method denies calling {@code setLogger},
* this method throws a
* {@code java.lang.SecurityException}.
*
* @param logger a Logger object instance
* @param level a Level object instance indicating the degree of logging
* required
* @throws java.lang.SecurityException if a security manager exists and its
* {@code checkPermission} method denies calling {@code setLogger}
* @throws NullPointerException if the logger is null
* @see SecurityManager#checkPermission
* @see LoggingPermission
*/
public static void setLogger(Logger logger, Level level) {
// singleton
SecurityManager sec = System.getSecurityManager();
if (sec != null) {
sec.checkPermission(SET_SYNCFACTORY_PERMISSION);
}
if(logger == null){
throw new NullPointerException("You must provide a Logger");
}
logger.setLevel(level);
rsLogger = logger;
}
/**
* Returns the logging object for applications to retrieve
* synchronization events posted by SyncProvider implementations.
* @return The {@code Logger} that has been specified for use by
* {@code SyncProvider} implementations
* @throws SyncFactoryException if no logging object has been set.
*/
public static Logger getLogger() throws SyncFactoryException {
Logger result = rsLogger;
// only one logger per session
if (result == null) {
throw new SyncFactoryException("(SyncFactory) : No logger has been set");
}
return result;
}
/**
* Sets the initial JNDI context from which SyncProvider implementations
* can be retrieved from a JNDI namespace
* <p>
* This method checks to see that there is an {@code SQLPermission}
* object which grants the permission {@code setSyncFactory}
* before allowing the method to succeed. If a
* {@code SecurityManager} exists and its
* {@code checkPermission} method denies calling {@code setJNDIContext},
* this method throws a
* {@code java.lang.SecurityException}.
*
* @param ctx a valid JNDI context
* @throws SyncFactoryException if the supplied JNDI context is null
* @throws java.lang.SecurityException if a security manager exists and its
* {@code checkPermission} method denies calling {@code setJNDIContext}
* @see SecurityManager#checkPermission
*/
public static synchronized void setJNDIContext(javax.naming.Context ctx)
throws SyncFactoryException {
SecurityManager sec = System.getSecurityManager();
if (sec != null) {
sec.checkPermission(SET_SYNCFACTORY_PERMISSION);
}
if (ctx == null) {
throw new SyncFactoryException("Invalid JNDI context supplied");
}
ic = ctx;
}
/**
* Controls JNDI context initialization.
*
* @throws SyncFactoryException if an error occurs parsing the JNDI context
*/
private static synchronized void initJNDIContext() throws SyncFactoryException {
if ((ic != null) && (lazyJNDICtxRefresh == false)) {
try {
parseProperties(parseJNDIContext());
lazyJNDICtxRefresh = true; // touch JNDI namespace once.
} catch (NamingException e) {
e.printStackTrace();
throw new SyncFactoryException("SPI: NamingException: " + e.getExplanation());
} catch (Exception e) {
e.printStackTrace();
throw new SyncFactoryException("SPI: Exception: " + e.getMessage());
}
}
}
/**
* Internal switch indicating whether the JNDI namespace should be re-read.
*/
private static boolean lazyJNDICtxRefresh = false;
/**
* Parses the set JNDI Context and passes bindings to the enumerateBindings
* method when complete.
*/
private static Properties parseJNDIContext() throws NamingException {
NamingEnumeration<?> bindings = ic.listBindings("");
Properties properties = new Properties();
// Hunt one level below context for available SyncProvider objects
enumerateBindings(bindings, properties);
return properties;
}
/**
* Scans each binding on JNDI context and determines if any binding is an
* instance of SyncProvider, if so, add this to the registry and continue to
* scan the current context using a re-entrant call to this method until all
* bindings have been enumerated.
*/
private static void enumerateBindings(NamingEnumeration<?> bindings,
Properties properties) throws NamingException {
boolean syncProviderObj = false; // move to parameters ?
try {
Binding bd = null;
Object elementObj = null;
String element = null;
while (bindings.hasMore()) {
bd = (Binding) bindings.next();
element = bd.getName();
elementObj = bd.getObject();
if (!(ic.lookup(element) instanceof Context)) {
// skip directories/sub-contexts
if (ic.lookup(element) instanceof SyncProvider) {
syncProviderObj = true;
}
}
if (syncProviderObj) {
SyncProvider sync = (SyncProvider) elementObj;
properties.put(SyncFactory.ROWSET_SYNC_PROVIDER,
sync.getProviderID());
syncProviderObj = false; // reset
}
}
} catch (javax.naming.NotContextException e) {
bindings.next();
// Re-entrant call into method
enumerateBindings(bindings, properties);
}
}
    /**
     * Lazy initialization Holder class used by {@code getSyncFactory}
     */
    private static class SyncFactoryHolder {
        // Initialized by the JVM on first access of SyncFactoryHolder.factory;
        // class-loading rules make this construction thread-safe without locks.
        static final SyncFactory factory = new SyncFactory();
    }
}
/**
 * Internal class that defines the lazy reference construct for each registered
 * SyncProvider implementation.
 *
 * <p>Each instance holds only the registration metadata (class name, vendor,
 * version, index); every capability query delegates to the real provider,
 * which is instantiated on demand via {@code SyncFactory.getInstance}.
 */
class ProviderImpl extends SyncProvider {
    // Fully qualified class name of the provider; doubles as the provider ID
    // and as the key used to instantiate the real provider on demand.
    private String className = null;
    // Vendor name as read from the provider registration.
    private String vendorName = null;
    // Provider version string as read from the provider registration.
    private String ver = null;
    // Registration slot of this provider within the factory's enumeration.
    private int index;
    public void setClassname(String classname) {
        className = classname;
    }
    public String getClassname() {
        return className;
    }
    public void setVendor(String vendor) {
        vendorName = vendor;
    }
    public String getVendor() {
        return vendorName;
    }
    public void setVersion(String providerVer) {
        ver = providerVer;
    }
    public String getVersion() {
        return ver;
    }
    public void setIndex(int i) {
        index = i;
    }
    public int getIndex() {
        return index;
    }
    // Delegates to the lazily instantiated provider; factory failures are
    // surfaced to the caller as SyncProviderException.
    public int getDataSourceLock() throws SyncProviderException {
        int dsLock = 0;
        try {
            dsLock = SyncFactory.getInstance(className).getDataSourceLock();
        } catch (SyncFactoryException sfEx) {
            throw new SyncProviderException(sfEx.getMessage());
        }
        return dsLock;
    }
    // Delegates to the lazily instantiated provider; on factory failure the
    // default grade (0) is returned because this method cannot throw.
    public int getProviderGrade() {
        int grade = 0;
        try {
            grade = SyncFactory.getInstance(className).getProviderGrade();
        } catch (SyncFactoryException sfEx) {
            // NOTE(review): deliberately swallowed — caller gets the default
            // grade (0); confirm this matches SyncProvider's contract.
        }
        return grade;
    }
    public String getProviderID() {
        return className;
    }
    // NOTE(review): dead commented-out code retained from the original;
    // consider deleting.
    /*
    public javax.sql.RowSetInternal getRowSetInternal() {
        try
        {
            return SyncFactory.getInstance(className).getRowSetInternal();
        } catch(SyncFactoryException sfEx) {
            //
        }
    }
    */
    // Delegates to the lazily instantiated provider; returns null when the
    // provider cannot be instantiated (method cannot throw).
    public javax.sql.RowSetReader getRowSetReader() {
        RowSetReader rsReader = null;
        try {
            rsReader = SyncFactory.getInstance(className).getRowSetReader();
        } catch (SyncFactoryException sfEx) {
            // NOTE(review): deliberately swallowed — caller receives null.
        }
        return rsReader;
    }
    // Delegates to the lazily instantiated provider; returns null when the
    // provider cannot be instantiated (method cannot throw).
    public javax.sql.RowSetWriter getRowSetWriter() {
        RowSetWriter rsWriter = null;
        try {
            rsWriter = SyncFactory.getInstance(className).getRowSetWriter();
        } catch (SyncFactoryException sfEx) {
            // NOTE(review): deliberately swallowed — caller receives null.
        }
        return rsWriter;
    }
    // Delegates to the lazily instantiated provider; factory failures are
    // surfaced to the caller as SyncProviderException.
    public void setDataSourceLock(int param)
        throws SyncProviderException {
        try {
            SyncFactory.getInstance(className).setDataSourceLock(param);
        } catch (SyncFactoryException sfEx) {
            throw new SyncProviderException(sfEx.getMessage());
        }
    }
    // Delegates to the lazily instantiated provider; on factory failure the
    // default (0) is returned because this method cannot throw.
    public int supportsUpdatableView() {
        int view = 0;
        try {
            view = SyncFactory.getInstance(className).supportsUpdatableView();
        } catch (SyncFactoryException sfEx) {
            // NOTE(review): deliberately swallowed — caller gets the default.
        }
        return view;
    }
}
|
googleapis/sdk-platform-java | 35,243 | java-showcase/gapic-showcase/src/main/java/com/google/showcase/v1beta1/stub/TestingStubSettings.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.showcase.v1beta1.stub;
import static com.google.showcase.v1beta1.TestingClient.ListLocationsPagedResponse;
import static com.google.showcase.v1beta1.TestingClient.ListSessionsPagedResponse;
import static com.google.showcase.v1beta1.TestingClient.ListTestsPagedResponse;
import com.google.api.core.ApiFunction;
import com.google.api.core.ApiFuture;
import com.google.api.core.BetaApi;
import com.google.api.core.ObsoleteApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.GrpcTransportChannel;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
import com.google.api.gax.httpjson.GaxHttpJsonProperties;
import com.google.api.gax.httpjson.HttpJsonTransportChannel;
import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider;
import com.google.api.gax.retrying.RetrySettings;
import com.google.api.gax.rpc.ApiCallContext;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.PagedListDescriptor;
import com.google.api.gax.rpc.PagedListResponseFactory;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.StubSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.location.GetLocationRequest;
import com.google.cloud.location.ListLocationsRequest;
import com.google.cloud.location.ListLocationsResponse;
import com.google.cloud.location.Location;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.iam.v1.GetIamPolicyRequest;
import com.google.iam.v1.Policy;
import com.google.iam.v1.SetIamPolicyRequest;
import com.google.iam.v1.TestIamPermissionsRequest;
import com.google.iam.v1.TestIamPermissionsResponse;
import com.google.protobuf.Empty;
import com.google.showcase.v1beta1.CreateSessionRequest;
import com.google.showcase.v1beta1.DeleteSessionRequest;
import com.google.showcase.v1beta1.DeleteTestRequest;
import com.google.showcase.v1beta1.GetSessionRequest;
import com.google.showcase.v1beta1.ListSessionsRequest;
import com.google.showcase.v1beta1.ListSessionsResponse;
import com.google.showcase.v1beta1.ListTestsRequest;
import com.google.showcase.v1beta1.ListTestsResponse;
import com.google.showcase.v1beta1.ReportSessionRequest;
import com.google.showcase.v1beta1.ReportSessionResponse;
import com.google.showcase.v1beta1.Session;
import com.google.showcase.v1beta1.Test;
import com.google.showcase.v1beta1.VerifyTestRequest;
import com.google.showcase.v1beta1.VerifyTestResponse;
import java.io.IOException;
import java.util.List;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Settings class to configure an instance of {@link TestingStub}.
*
* <p>The default instance has everything set to sensible defaults:
*
* <ul>
* <li>The default service address (localhost) and default port (7469) are used.
* <li>Credentials are acquired automatically through Application Default Credentials.
* <li>Retries are configured for idempotent methods but not for non-idempotent methods.
* </ul>
*
* <p>The builder of this class is recursive, so contained classes are themselves builders. When
* build() is called, the tree of builders is called to create the complete settings object.
*
* <p>For example, to set the
* [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings)
* of createSession:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* TestingStubSettings.Builder testingSettingsBuilder = TestingStubSettings.newBuilder();
* testingSettingsBuilder
* .createSessionSettings()
* .setRetrySettings(
* testingSettingsBuilder
* .createSessionSettings()
* .getRetrySettings()
* .toBuilder()
* .setInitialRetryDelayDuration(Duration.ofSeconds(1))
* .setInitialRpcTimeoutDuration(Duration.ofSeconds(5))
* .setMaxAttempts(5)
* .setMaxRetryDelayDuration(Duration.ofSeconds(30))
* .setMaxRpcTimeoutDuration(Duration.ofSeconds(60))
* .setRetryDelayMultiplier(1.3)
* .setRpcTimeoutMultiplier(1.5)
* .setTotalTimeoutDuration(Duration.ofSeconds(300))
* .build());
* TestingStubSettings testingSettings = testingSettingsBuilder.build();
* }</pre>
*
* Please refer to the [Client Side Retry
* Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for
* additional support in setting retries.
*/
@BetaApi
@Generated("by gapic-generator-java")
public class TestingStubSettings extends StubSettings<TestingStubSettings> {
  /** The default scopes of the service. */
  private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
      ImmutableList.<String>builder().build();
  // Per-RPC call settings. Each is built once from the Builder in the
  // constructor and exposed read-only through the accessor of the same name.
  private final UnaryCallSettings<CreateSessionRequest, Session> createSessionSettings;
  private final UnaryCallSettings<GetSessionRequest, Session> getSessionSettings;
  private final PagedCallSettings<
          ListSessionsRequest, ListSessionsResponse, ListSessionsPagedResponse>
      listSessionsSettings;
  private final UnaryCallSettings<DeleteSessionRequest, Empty> deleteSessionSettings;
  private final UnaryCallSettings<ReportSessionRequest, ReportSessionResponse>
      reportSessionSettings;
  private final PagedCallSettings<ListTestsRequest, ListTestsResponse, ListTestsPagedResponse>
      listTestsSettings;
  private final UnaryCallSettings<DeleteTestRequest, Empty> deleteTestSettings;
  private final UnaryCallSettings<VerifyTestRequest, VerifyTestResponse> verifyTestSettings;
  private final PagedCallSettings<
          ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
      listLocationsSettings;
  private final UnaryCallSettings<GetLocationRequest, Location> getLocationSettings;
  private final UnaryCallSettings<SetIamPolicyRequest, Policy> setIamPolicySettings;
  private final UnaryCallSettings<GetIamPolicyRequest, Policy> getIamPolicySettings;
  private final UnaryCallSettings<TestIamPermissionsRequest, TestIamPermissionsResponse>
      testIamPermissionsSettings;
  // Page descriptor: teaches the paging machinery how to inject page
  // tokens/sizes into ListSessions requests and how to extract the next
  // token and the Session resources from responses.
  private static final PagedListDescriptor<ListSessionsRequest, ListSessionsResponse, Session>
      LIST_SESSIONS_PAGE_STR_DESC =
          new PagedListDescriptor<ListSessionsRequest, ListSessionsResponse, Session>() {
            @Override
            public String emptyToken() {
              return "";
            }
            @Override
            public ListSessionsRequest injectToken(ListSessionsRequest payload, String token) {
              return ListSessionsRequest.newBuilder(payload).setPageToken(token).build();
            }
            @Override
            public ListSessionsRequest injectPageSize(ListSessionsRequest payload, int pageSize) {
              return ListSessionsRequest.newBuilder(payload).setPageSize(pageSize).build();
            }
            @Override
            public Integer extractPageSize(ListSessionsRequest payload) {
              return payload.getPageSize();
            }
            @Override
            public String extractNextToken(ListSessionsResponse payload) {
              return payload.getNextPageToken();
            }
            @Override
            public Iterable<Session> extractResources(ListSessionsResponse payload) {
              return payload.getSessionsList();
            }
          };
  // Page descriptor for the ListTests RPC (see LIST_SESSIONS_PAGE_STR_DESC).
  private static final PagedListDescriptor<ListTestsRequest, ListTestsResponse, Test>
      LIST_TESTS_PAGE_STR_DESC =
          new PagedListDescriptor<ListTestsRequest, ListTestsResponse, Test>() {
            @Override
            public String emptyToken() {
              return "";
            }
            @Override
            public ListTestsRequest injectToken(ListTestsRequest payload, String token) {
              return ListTestsRequest.newBuilder(payload).setPageToken(token).build();
            }
            @Override
            public ListTestsRequest injectPageSize(ListTestsRequest payload, int pageSize) {
              return ListTestsRequest.newBuilder(payload).setPageSize(pageSize).build();
            }
            @Override
            public Integer extractPageSize(ListTestsRequest payload) {
              return payload.getPageSize();
            }
            @Override
            public String extractNextToken(ListTestsResponse payload) {
              return payload.getNextPageToken();
            }
            @Override
            public Iterable<Test> extractResources(ListTestsResponse payload) {
              return payload.getTestsList();
            }
          };
  // Page descriptor for the ListLocations mixin RPC.
  private static final PagedListDescriptor<ListLocationsRequest, ListLocationsResponse, Location>
      LIST_LOCATIONS_PAGE_STR_DESC =
          new PagedListDescriptor<ListLocationsRequest, ListLocationsResponse, Location>() {
            @Override
            public String emptyToken() {
              return "";
            }
            @Override
            public ListLocationsRequest injectToken(ListLocationsRequest payload, String token) {
              return ListLocationsRequest.newBuilder(payload).setPageToken(token).build();
            }
            @Override
            public ListLocationsRequest injectPageSize(ListLocationsRequest payload, int pageSize) {
              return ListLocationsRequest.newBuilder(payload).setPageSize(pageSize).build();
            }
            @Override
            public Integer extractPageSize(ListLocationsRequest payload) {
              return payload.getPageSize();
            }
            @Override
            public String extractNextToken(ListLocationsResponse payload) {
              return payload.getNextPageToken();
            }
            @Override
            public Iterable<Location> extractResources(ListLocationsResponse payload) {
              return payload.getLocationsList();
            }
          };
  // Response factory: pairs each paged RPC with its descriptor above to
  // build the lazily-fetching Paged­Response wrapper around a future.
  private static final PagedListResponseFactory<
          ListSessionsRequest, ListSessionsResponse, ListSessionsPagedResponse>
      LIST_SESSIONS_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListSessionsRequest, ListSessionsResponse, ListSessionsPagedResponse>() {
            @Override
            public ApiFuture<ListSessionsPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListSessionsRequest, ListSessionsResponse> callable,
                ListSessionsRequest request,
                ApiCallContext context,
                ApiFuture<ListSessionsResponse> futureResponse) {
              PageContext<ListSessionsRequest, ListSessionsResponse, Session> pageContext =
                  PageContext.create(callable, LIST_SESSIONS_PAGE_STR_DESC, request, context);
              return ListSessionsPagedResponse.createAsync(pageContext, futureResponse);
            }
          };
  // Response factory for the ListTests RPC.
  private static final PagedListResponseFactory<
          ListTestsRequest, ListTestsResponse, ListTestsPagedResponse>
      LIST_TESTS_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListTestsRequest, ListTestsResponse, ListTestsPagedResponse>() {
            @Override
            public ApiFuture<ListTestsPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListTestsRequest, ListTestsResponse> callable,
                ListTestsRequest request,
                ApiCallContext context,
                ApiFuture<ListTestsResponse> futureResponse) {
              PageContext<ListTestsRequest, ListTestsResponse, Test> pageContext =
                  PageContext.create(callable, LIST_TESTS_PAGE_STR_DESC, request, context);
              return ListTestsPagedResponse.createAsync(pageContext, futureResponse);
            }
          };
  // Response factory for the ListLocations mixin RPC.
  private static final PagedListResponseFactory<
          ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
      LIST_LOCATIONS_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>() {
            @Override
            public ApiFuture<ListLocationsPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListLocationsRequest, ListLocationsResponse> callable,
                ListLocationsRequest request,
                ApiCallContext context,
                ApiFuture<ListLocationsResponse> futureResponse) {
              PageContext<ListLocationsRequest, ListLocationsResponse, Location> pageContext =
                  PageContext.create(callable, LIST_LOCATIONS_PAGE_STR_DESC, request, context);
              return ListLocationsPagedResponse.createAsync(pageContext, futureResponse);
            }
          };
  // ---------------------------------------------------------------------
  // Read-only accessors for the per-RPC call settings built in the
  // constructor. One accessor per RPC; paged RPCs return PagedCallSettings.
  // ---------------------------------------------------------------------
  /** Returns the object with the settings used for calls to createSession. */
  public UnaryCallSettings<CreateSessionRequest, Session> createSessionSettings() {
    return createSessionSettings;
  }
  /** Returns the object with the settings used for calls to getSession. */
  public UnaryCallSettings<GetSessionRequest, Session> getSessionSettings() {
    return getSessionSettings;
  }
  /** Returns the object with the settings used for calls to listSessions. */
  public PagedCallSettings<ListSessionsRequest, ListSessionsResponse, ListSessionsPagedResponse>
      listSessionsSettings() {
    return listSessionsSettings;
  }
  /** Returns the object with the settings used for calls to deleteSession. */
  public UnaryCallSettings<DeleteSessionRequest, Empty> deleteSessionSettings() {
    return deleteSessionSettings;
  }
  /** Returns the object with the settings used for calls to reportSession. */
  public UnaryCallSettings<ReportSessionRequest, ReportSessionResponse> reportSessionSettings() {
    return reportSessionSettings;
  }
  /** Returns the object with the settings used for calls to listTests. */
  public PagedCallSettings<ListTestsRequest, ListTestsResponse, ListTestsPagedResponse>
      listTestsSettings() {
    return listTestsSettings;
  }
  /** Returns the object with the settings used for calls to deleteTest. */
  public UnaryCallSettings<DeleteTestRequest, Empty> deleteTestSettings() {
    return deleteTestSettings;
  }
  /** Returns the object with the settings used for calls to verifyTest. */
  public UnaryCallSettings<VerifyTestRequest, VerifyTestResponse> verifyTestSettings() {
    return verifyTestSettings;
  }
  /** Returns the object with the settings used for calls to listLocations. */
  public PagedCallSettings<ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
      listLocationsSettings() {
    return listLocationsSettings;
  }
  /** Returns the object with the settings used for calls to getLocation. */
  public UnaryCallSettings<GetLocationRequest, Location> getLocationSettings() {
    return getLocationSettings;
  }
  /** Returns the object with the settings used for calls to setIamPolicy. */
  public UnaryCallSettings<SetIamPolicyRequest, Policy> setIamPolicySettings() {
    return setIamPolicySettings;
  }
  /** Returns the object with the settings used for calls to getIamPolicy. */
  public UnaryCallSettings<GetIamPolicyRequest, Policy> getIamPolicySettings() {
    return getIamPolicySettings;
  }
  /** Returns the object with the settings used for calls to testIamPermissions. */
  public UnaryCallSettings<TestIamPermissionsRequest, TestIamPermissionsResponse>
      testIamPermissionsSettings() {
    return testIamPermissionsSettings;
  }
  /**
   * Creates a transport-specific {@link TestingStub} matching the configured
   * {@code TransportChannelProvider} (gRPC or REST/HTTP-JSON).
   *
   * @throws IOException if the stub cannot be created
   * @throws UnsupportedOperationException if the configured transport is
   *     neither gRPC nor HTTP-JSON
   */
  public TestingStub createStub() throws IOException {
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(GrpcTransportChannel.getGrpcTransportName())) {
      return GrpcTestingStub.create(this);
    }
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(HttpJsonTransportChannel.getHttpJsonTransportName())) {
      return HttpJsonTestingStub.create(this);
    }
    throw new UnsupportedOperationException(
        String.format(
            "Transport not supported: %s", getTransportChannelProvider().getTransportName()));
  }
  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return InstantiatingExecutorProvider.newBuilder();
  }
  /** Returns the default service endpoint. */
  @ObsoleteApi("Use getEndpoint() instead")
  public static String getDefaultEndpoint() {
    return "localhost:7469";
  }
  /** Returns the default mTLS service endpoint. */
  public static String getDefaultMtlsEndpoint() {
    return "localhost:7469";
  }
  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return DEFAULT_SERVICE_SCOPES;
  }
  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return GoogleCredentialsProvider.newBuilder()
        .setScopesToApply(DEFAULT_SERVICE_SCOPES)
        .setUseJwtAccessWithScope(true);
  }
  /** Returns a builder for the default gRPC ChannelProvider for this service. */
  public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
    return InstantiatingGrpcChannelProvider.newBuilder()
        .setMaxInboundMessageSize(Integer.MAX_VALUE);
  }
  /** Returns a builder for the default REST ChannelProvider for this service. */
  @BetaApi
  public static InstantiatingHttpJsonChannelProvider.Builder
      defaultHttpJsonTransportProviderBuilder() {
    return InstantiatingHttpJsonChannelProvider.newBuilder();
  }
  /** Returns the default transport channel provider (gRPC). */
  public static TransportChannelProvider defaultTransportChannelProvider() {
    return defaultGrpcTransportProviderBuilder().build();
  }
  /** Returns the API client header provider builder used by the gRPC transport. */
  public static ApiClientHeaderProvider.Builder defaultGrpcApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken("gapic", GaxProperties.getLibraryVersion(TestingStubSettings.class))
        .setTransportToken(
            GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion());
  }
  /** Returns the API client header provider builder used by the REST transport. */
  public static ApiClientHeaderProvider.Builder defaultHttpJsonApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken("gapic", GaxProperties.getLibraryVersion(TestingStubSettings.class))
        .setTransportToken(
            GaxHttpJsonProperties.getHttpJsonTokenName(),
            GaxHttpJsonProperties.getHttpJsonVersion());
  }
  /** Returns the default API client header provider builder (gRPC). */
  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return TestingStubSettings.defaultGrpcApiClientHeaderProviderBuilder();
  }
  /** Returns a new gRPC builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }
  /** Returns a new REST builder for this class. */
  public static Builder newHttpJsonBuilder() {
    return Builder.createHttpJsonDefault();
  }
  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }
  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }
  /** Builds the immutable settings object by freezing each builder's call settings. */
  protected TestingStubSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);
    createSessionSettings = settingsBuilder.createSessionSettings().build();
    getSessionSettings = settingsBuilder.getSessionSettings().build();
    listSessionsSettings = settingsBuilder.listSessionsSettings().build();
    deleteSessionSettings = settingsBuilder.deleteSessionSettings().build();
    reportSessionSettings = settingsBuilder.reportSessionSettings().build();
    listTestsSettings = settingsBuilder.listTestsSettings().build();
    deleteTestSettings = settingsBuilder.deleteTestSettings().build();
    verifyTestSettings = settingsBuilder.verifyTestSettings().build();
    listLocationsSettings = settingsBuilder.listLocationsSettings().build();
    getLocationSettings = settingsBuilder.getLocationSettings().build();
    setIamPolicySettings = settingsBuilder.setIamPolicySettings().build();
    getIamPolicySettings = settingsBuilder.getIamPolicySettings().build();
    testIamPermissionsSettings = settingsBuilder.testIamPermissionsSettings().build();
  }
/** Builder for TestingStubSettings. */
public static class Builder extends StubSettings.Builder<TestingStubSettings, Builder> {
    // Builders for the per-RPC call settings; mirrors the fields of the
    // enclosing settings class one-for-one.
    private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
    private final UnaryCallSettings.Builder<CreateSessionRequest, Session> createSessionSettings;
    private final UnaryCallSettings.Builder<GetSessionRequest, Session> getSessionSettings;
    private final PagedCallSettings.Builder<
            ListSessionsRequest, ListSessionsResponse, ListSessionsPagedResponse>
        listSessionsSettings;
    private final UnaryCallSettings.Builder<DeleteSessionRequest, Empty> deleteSessionSettings;
    private final UnaryCallSettings.Builder<ReportSessionRequest, ReportSessionResponse>
        reportSessionSettings;
    private final PagedCallSettings.Builder<
            ListTestsRequest, ListTestsResponse, ListTestsPagedResponse>
        listTestsSettings;
    private final UnaryCallSettings.Builder<DeleteTestRequest, Empty> deleteTestSettings;
    private final UnaryCallSettings.Builder<VerifyTestRequest, VerifyTestResponse>
        verifyTestSettings;
    private final PagedCallSettings.Builder<
            ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
        listLocationsSettings;
    private final UnaryCallSettings.Builder<GetLocationRequest, Location> getLocationSettings;
    private final UnaryCallSettings.Builder<SetIamPolicyRequest, Policy> setIamPolicySettings;
    private final UnaryCallSettings.Builder<GetIamPolicyRequest, Policy> getIamPolicySettings;
    private final UnaryCallSettings.Builder<TestIamPermissionsRequest, TestIamPermissionsResponse>
        testIamPermissionsSettings;
    // Named retryable-code sets referenced by initDefaults; this service
    // configures every method as non-retrying ("no_retry_codes").
    private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
        RETRYABLE_CODE_DEFINITIONS;
    static {
      ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
          ImmutableMap.builder();
      definitions.put("no_retry_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList()));
      RETRYABLE_CODE_DEFINITIONS = definitions.build();
    }
    // Named RetrySettings presets referenced by initDefaults; only the
    // non-retrying preset ("no_retry_params") is defined for this service.
    private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;
    static {
      ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
      RetrySettings settings = null;
      settings = RetrySettings.newBuilder().setRpcTimeoutMultiplier(1.0).build();
      definitions.put("no_retry_params", settings);
      RETRY_PARAM_DEFINITIONS = definitions.build();
    }
    /** Creates a builder with no client context and default call settings. */
    protected Builder() {
      this(((ClientContext) null));
    }
    /** Creates a builder whose defaults are taken from the given client context (may be null). */
    protected Builder(ClientContext clientContext) {
      super(clientContext);
      createSessionSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      getSessionSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      listSessionsSettings = PagedCallSettings.newBuilder(LIST_SESSIONS_PAGE_STR_FACT);
      deleteSessionSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      reportSessionSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      listTestsSettings = PagedCallSettings.newBuilder(LIST_TESTS_PAGE_STR_FACT);
      deleteTestSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      verifyTestSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      listLocationsSettings = PagedCallSettings.newBuilder(LIST_LOCATIONS_PAGE_STR_FACT);
      getLocationSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      setIamPolicySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      getIamPolicySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      testIamPermissionsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      // Collected so applyToAllUnaryMethods can update every method at once.
      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              createSessionSettings,
              getSessionSettings,
              listSessionsSettings,
              deleteSessionSettings,
              reportSessionSettings,
              listTestsSettings,
              deleteTestSettings,
              verifyTestSettings,
              listLocationsSettings,
              getLocationSettings,
              setIamPolicySettings,
              getIamPolicySettings,
              testIamPermissionsSettings);
      initDefaults(this);
    }
    /** Creates a builder pre-populated from an existing settings instance (used by toBuilder). */
    protected Builder(TestingStubSettings settings) {
      super(settings);
      createSessionSettings = settings.createSessionSettings.toBuilder();
      getSessionSettings = settings.getSessionSettings.toBuilder();
      listSessionsSettings = settings.listSessionsSettings.toBuilder();
      deleteSessionSettings = settings.deleteSessionSettings.toBuilder();
      reportSessionSettings = settings.reportSessionSettings.toBuilder();
      listTestsSettings = settings.listTestsSettings.toBuilder();
      deleteTestSettings = settings.deleteTestSettings.toBuilder();
      verifyTestSettings = settings.verifyTestSettings.toBuilder();
      listLocationsSettings = settings.listLocationsSettings.toBuilder();
      getLocationSettings = settings.getLocationSettings.toBuilder();
      setIamPolicySettings = settings.setIamPolicySettings.toBuilder();
      getIamPolicySettings = settings.getIamPolicySettings.toBuilder();
      testIamPermissionsSettings = settings.testIamPermissionsSettings.toBuilder();
      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              createSessionSettings,
              getSessionSettings,
              listSessionsSettings,
              deleteSessionSettings,
              reportSessionSettings,
              listTestsSettings,
              deleteTestSettings,
              verifyTestSettings,
              listLocationsSettings,
              getLocationSettings,
              setIamPolicySettings,
              getIamPolicySettings,
              testIamPermissionsSettings);
    }
    /** Creates the default gRPC-transport builder (used by newBuilder()). */
    private static Builder createDefault() {
      Builder builder = new Builder(((ClientContext) null));
      builder.setTransportChannelProvider(defaultTransportChannelProvider());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);
      return initDefaults(builder);
    }
    /** Creates the default REST/HTTP-JSON builder (used by newHttpJsonBuilder()). */
    private static Builder createHttpJsonDefault() {
      Builder builder = new Builder(((ClientContext) null));
      builder.setTransportChannelProvider(defaultHttpJsonTransportProviderBuilder().build());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultHttpJsonApiClientHeaderProviderBuilder().build());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);
      return initDefaults(builder);
    }
    /**
     * Applies the service's default retry configuration (non-retrying for
     * every method) to each per-RPC settings builder.
     */
    private static Builder initDefaults(Builder builder) {
      builder
          .createSessionSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));
      builder
          .getSessionSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));
      builder
          .listSessionsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));
      builder
          .deleteSessionSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));
      builder
          .reportSessionSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));
      builder
          .listTestsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));
      builder
          .deleteTestSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));
      builder
          .verifyTestSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));
      builder
          .listLocationsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));
      builder
          .getLocationSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));
      builder
          .setIamPolicySettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));
      builder
          .getIamPolicySettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));
      builder
          .testIamPermissionsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));
      return builder;
    }
    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
      super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
      return this;
    }
    /** Returns the list of per-method settings builders managed by this Builder. */
    public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
      return unaryMethodSettingsBuilders;
    }
/** Returns the builder for the settings used for calls to createSession. */
public UnaryCallSettings.Builder<CreateSessionRequest, Session> createSessionSettings() {
return createSessionSettings;
}
/** Returns the builder for the settings used for calls to getSession. */
public UnaryCallSettings.Builder<GetSessionRequest, Session> getSessionSettings() {
return getSessionSettings;
}
/** Returns the builder for the settings used for calls to listSessions. */
public PagedCallSettings.Builder<
ListSessionsRequest, ListSessionsResponse, ListSessionsPagedResponse>
listSessionsSettings() {
return listSessionsSettings;
}
/** Returns the builder for the settings used for calls to deleteSession. */
public UnaryCallSettings.Builder<DeleteSessionRequest, Empty> deleteSessionSettings() {
return deleteSessionSettings;
}
/** Returns the builder for the settings used for calls to reportSession. */
public UnaryCallSettings.Builder<ReportSessionRequest, ReportSessionResponse>
reportSessionSettings() {
return reportSessionSettings;
}
/** Returns the builder for the settings used for calls to listTests. */
public PagedCallSettings.Builder<ListTestsRequest, ListTestsResponse, ListTestsPagedResponse>
listTestsSettings() {
return listTestsSettings;
}
/** Returns the builder for the settings used for calls to deleteTest. */
public UnaryCallSettings.Builder<DeleteTestRequest, Empty> deleteTestSettings() {
return deleteTestSettings;
}
/** Returns the builder for the settings used for calls to verifyTest. */
public UnaryCallSettings.Builder<VerifyTestRequest, VerifyTestResponse> verifyTestSettings() {
return verifyTestSettings;
}
/** Returns the builder for the settings used for calls to listLocations. */
public PagedCallSettings.Builder<
ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
listLocationsSettings() {
return listLocationsSettings;
}
/** Returns the builder for the settings used for calls to getLocation. */
public UnaryCallSettings.Builder<GetLocationRequest, Location> getLocationSettings() {
return getLocationSettings;
}
/** Returns the builder for the settings used for calls to setIamPolicy. */
public UnaryCallSettings.Builder<SetIamPolicyRequest, Policy> setIamPolicySettings() {
return setIamPolicySettings;
}
/** Returns the builder for the settings used for calls to getIamPolicy. */
public UnaryCallSettings.Builder<GetIamPolicyRequest, Policy> getIamPolicySettings() {
return getIamPolicySettings;
}
/** Returns the builder for the settings used for calls to testIamPermissions. */
public UnaryCallSettings.Builder<TestIamPermissionsRequest, TestIamPermissionsResponse>
testIamPermissionsSettings() {
return testIamPermissionsSettings;
}
@Override
public TestingStubSettings build() throws IOException {
return new TestingStubSettings(this);
}
}
}
|
googleapis/google-cloud-java | 34,937 | java-notebooks/proto-google-cloud-notebooks-v1/src/main/java/com/google/cloud/notebooks/v1/CreateInstanceRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/notebooks/v1/service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.notebooks.v1;
/**
*
*
* <pre>
* Request for creating a notebook instance.
* </pre>
*
* Protobuf type {@code google.cloud.notebooks.v1.CreateInstanceRequest}
*/
public final class CreateInstanceRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.notebooks.v1.CreateInstanceRequest)
CreateInstanceRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use CreateInstanceRequest.newBuilder() to construct.
private CreateInstanceRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CreateInstanceRequest() {
parent_ = "";
instanceId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new CreateInstanceRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.notebooks.v1.NotebooksProto
.internal_static_google_cloud_notebooks_v1_CreateInstanceRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.notebooks.v1.NotebooksProto
.internal_static_google_cloud_notebooks_v1_CreateInstanceRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.notebooks.v1.CreateInstanceRequest.class,
com.google.cloud.notebooks.v1.CreateInstanceRequest.Builder.class);
}
private int bitField0_;
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Format:
* `parent=projects/{project_id}/locations/{location}`
* </pre>
*
* <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. Format:
* `parent=projects/{project_id}/locations/{location}`
* </pre>
*
* <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int INSTANCE_ID_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object instanceId_ = "";
/**
*
*
* <pre>
* Required. User-defined unique ID of this instance.
* </pre>
*
* <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The instanceId.
*/
@java.lang.Override
public java.lang.String getInstanceId() {
java.lang.Object ref = instanceId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
instanceId_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. User-defined unique ID of this instance.
* </pre>
*
* <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for instanceId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getInstanceIdBytes() {
java.lang.Object ref = instanceId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
instanceId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int INSTANCE_FIELD_NUMBER = 3;
private com.google.cloud.notebooks.v1.Instance instance_;
/**
*
*
* <pre>
* Required. The instance to be created.
* </pre>
*
* <code>
* .google.cloud.notebooks.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the instance field is set.
*/
@java.lang.Override
public boolean hasInstance() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The instance to be created.
* </pre>
*
* <code>
* .google.cloud.notebooks.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The instance.
*/
@java.lang.Override
public com.google.cloud.notebooks.v1.Instance getInstance() {
return instance_ == null
? com.google.cloud.notebooks.v1.Instance.getDefaultInstance()
: instance_;
}
/**
*
*
* <pre>
* Required. The instance to be created.
* </pre>
*
* <code>
* .google.cloud.notebooks.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.notebooks.v1.InstanceOrBuilder getInstanceOrBuilder() {
return instance_ == null
? com.google.cloud.notebooks.v1.Instance.getDefaultInstance()
: instance_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, instanceId_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(3, getInstance());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, instanceId_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getInstance());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.notebooks.v1.CreateInstanceRequest)) {
return super.equals(obj);
}
com.google.cloud.notebooks.v1.CreateInstanceRequest other =
(com.google.cloud.notebooks.v1.CreateInstanceRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (!getInstanceId().equals(other.getInstanceId())) return false;
if (hasInstance() != other.hasInstance()) return false;
if (hasInstance()) {
if (!getInstance().equals(other.getInstance())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
hash = (37 * hash) + INSTANCE_ID_FIELD_NUMBER;
hash = (53 * hash) + getInstanceId().hashCode();
if (hasInstance()) {
hash = (37 * hash) + INSTANCE_FIELD_NUMBER;
hash = (53 * hash) + getInstance().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.notebooks.v1.CreateInstanceRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.notebooks.v1.CreateInstanceRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.notebooks.v1.CreateInstanceRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.notebooks.v1.CreateInstanceRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.notebooks.v1.CreateInstanceRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.notebooks.v1.CreateInstanceRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.notebooks.v1.CreateInstanceRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.notebooks.v1.CreateInstanceRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.notebooks.v1.CreateInstanceRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.notebooks.v1.CreateInstanceRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.notebooks.v1.CreateInstanceRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.notebooks.v1.CreateInstanceRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.notebooks.v1.CreateInstanceRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request for creating a notebook instance.
* </pre>
*
* Protobuf type {@code google.cloud.notebooks.v1.CreateInstanceRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.notebooks.v1.CreateInstanceRequest)
com.google.cloud.notebooks.v1.CreateInstanceRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.notebooks.v1.NotebooksProto
.internal_static_google_cloud_notebooks_v1_CreateInstanceRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.notebooks.v1.NotebooksProto
.internal_static_google_cloud_notebooks_v1_CreateInstanceRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.notebooks.v1.CreateInstanceRequest.class,
com.google.cloud.notebooks.v1.CreateInstanceRequest.Builder.class);
}
// Construct using com.google.cloud.notebooks.v1.CreateInstanceRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getInstanceFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
instanceId_ = "";
instance_ = null;
if (instanceBuilder_ != null) {
instanceBuilder_.dispose();
instanceBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.notebooks.v1.NotebooksProto
.internal_static_google_cloud_notebooks_v1_CreateInstanceRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.notebooks.v1.CreateInstanceRequest getDefaultInstanceForType() {
return com.google.cloud.notebooks.v1.CreateInstanceRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.notebooks.v1.CreateInstanceRequest build() {
com.google.cloud.notebooks.v1.CreateInstanceRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.notebooks.v1.CreateInstanceRequest buildPartial() {
com.google.cloud.notebooks.v1.CreateInstanceRequest result =
new com.google.cloud.notebooks.v1.CreateInstanceRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.notebooks.v1.CreateInstanceRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.instanceId_ = instanceId_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000004) != 0)) {
result.instance_ = instanceBuilder_ == null ? instance_ : instanceBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.notebooks.v1.CreateInstanceRequest) {
return mergeFrom((com.google.cloud.notebooks.v1.CreateInstanceRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.notebooks.v1.CreateInstanceRequest other) {
if (other == com.google.cloud.notebooks.v1.CreateInstanceRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getInstanceId().isEmpty()) {
instanceId_ = other.instanceId_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.hasInstance()) {
mergeInstance(other.getInstance());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
instanceId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
input.readMessage(getInstanceFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Format:
* `parent=projects/{project_id}/locations/{location}`
* </pre>
*
* <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Format:
* `parent=projects/{project_id}/locations/{location}`
* </pre>
*
* <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Format:
* `parent=projects/{project_id}/locations/{location}`
* </pre>
*
* <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Format:
* `parent=projects/{project_id}/locations/{location}`
* </pre>
*
* <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Format:
* `parent=projects/{project_id}/locations/{location}`
* </pre>
*
* <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object instanceId_ = "";
/**
*
*
* <pre>
* Required. User-defined unique ID of this instance.
* </pre>
*
* <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The instanceId.
*/
public java.lang.String getInstanceId() {
java.lang.Object ref = instanceId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
instanceId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. User-defined unique ID of this instance.
* </pre>
*
* <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for instanceId.
*/
public com.google.protobuf.ByteString getInstanceIdBytes() {
java.lang.Object ref = instanceId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
instanceId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. User-defined unique ID of this instance.
* </pre>
*
* <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The instanceId to set.
* @return This builder for chaining.
*/
public Builder setInstanceId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
instanceId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. User-defined unique ID of this instance.
* </pre>
*
* <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearInstanceId() {
instanceId_ = getDefaultInstance().getInstanceId();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. User-defined unique ID of this instance.
* </pre>
*
* <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for instanceId to set.
* @return This builder for chaining.
*/
public Builder setInstanceIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
instanceId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private com.google.cloud.notebooks.v1.Instance instance_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.notebooks.v1.Instance,
com.google.cloud.notebooks.v1.Instance.Builder,
com.google.cloud.notebooks.v1.InstanceOrBuilder>
instanceBuilder_;
/**
*
*
* <pre>
* Required. The instance to be created.
* </pre>
*
* <code>
* .google.cloud.notebooks.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the instance field is set.
*/
public boolean hasInstance() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
*
* <pre>
* Required. The instance to be created.
* </pre>
*
* <code>
* .google.cloud.notebooks.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The instance.
*/
public com.google.cloud.notebooks.v1.Instance getInstance() {
if (instanceBuilder_ == null) {
return instance_ == null
? com.google.cloud.notebooks.v1.Instance.getDefaultInstance()
: instance_;
} else {
return instanceBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The instance to be created.
* </pre>
*
* <code>
* .google.cloud.notebooks.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setInstance(com.google.cloud.notebooks.v1.Instance value) {
if (instanceBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
instance_ = value;
} else {
instanceBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The instance to be created.
* </pre>
*
* <code>
* .google.cloud.notebooks.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setInstance(com.google.cloud.notebooks.v1.Instance.Builder builderForValue) {
if (instanceBuilder_ == null) {
instance_ = builderForValue.build();
} else {
instanceBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The instance to be created.
* </pre>
*
* <code>
* .google.cloud.notebooks.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeInstance(com.google.cloud.notebooks.v1.Instance value) {
if (instanceBuilder_ == null) {
if (((bitField0_ & 0x00000004) != 0)
&& instance_ != null
&& instance_ != com.google.cloud.notebooks.v1.Instance.getDefaultInstance()) {
getInstanceBuilder().mergeFrom(value);
} else {
instance_ = value;
}
} else {
instanceBuilder_.mergeFrom(value);
}
if (instance_ != null) {
bitField0_ |= 0x00000004;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The instance to be created.
* </pre>
*
* <code>
* .google.cloud.notebooks.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearInstance() {
bitField0_ = (bitField0_ & ~0x00000004);
instance_ = null;
if (instanceBuilder_ != null) {
instanceBuilder_.dispose();
instanceBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The instance to be created.
* </pre>
*
* <code>
* .google.cloud.notebooks.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.notebooks.v1.Instance.Builder getInstanceBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getInstanceFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The instance to be created.
* </pre>
*
* <code>
* .google.cloud.notebooks.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.notebooks.v1.InstanceOrBuilder getInstanceOrBuilder() {
if (instanceBuilder_ != null) {
return instanceBuilder_.getMessageOrBuilder();
} else {
return instance_ == null
? com.google.cloud.notebooks.v1.Instance.getDefaultInstance()
: instance_;
}
}
/**
*
*
* <pre>
* Required. The instance to be created.
* </pre>
*
* <code>
* .google.cloud.notebooks.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.notebooks.v1.Instance,
com.google.cloud.notebooks.v1.Instance.Builder,
com.google.cloud.notebooks.v1.InstanceOrBuilder>
getInstanceFieldBuilder() {
if (instanceBuilder_ == null) {
instanceBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.notebooks.v1.Instance,
com.google.cloud.notebooks.v1.Instance.Builder,
com.google.cloud.notebooks.v1.InstanceOrBuilder>(
getInstance(), getParentForChildren(), isClean());
instance_ = null;
}
return instanceBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.notebooks.v1.CreateInstanceRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.notebooks.v1.CreateInstanceRequest)
private static final com.google.cloud.notebooks.v1.CreateInstanceRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.notebooks.v1.CreateInstanceRequest();
}
public static com.google.cloud.notebooks.v1.CreateInstanceRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
  // Wire-format parser. On any failure the partially parsed message is attached to
  // the thrown InvalidProtocolBufferException so callers can inspect what was read.
  private static final com.google.protobuf.Parser<CreateInstanceRequest> PARSER =
      new com.google.protobuf.AbstractParser<CreateInstanceRequest>() {
        @java.lang.Override
        public CreateInstanceRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // I/O problems are re-wrapped as parse failures, preserving the cause.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<CreateInstanceRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<CreateInstanceRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.notebooks.v1.CreateInstanceRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 35,059 | java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/CreateModelDeploymentMonitoringJobRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/job_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1;
/**
 *
 *
 * <pre>
 * Request message for
 * [JobService.CreateModelDeploymentMonitoringJob][google.cloud.aiplatform.v1.JobService.CreateModelDeploymentMonitoringJob].
 * </pre>
 *
 * Protobuf type {@code google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest}
 */
public final class CreateModelDeploymentMonitoringJobRequest
    extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest)
    CreateModelDeploymentMonitoringJobRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use CreateModelDeploymentMonitoringJobRequest.newBuilder() to construct.
  private CreateModelDeploymentMonitoringJobRequest(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private CreateModelDeploymentMonitoringJobRequest() {
    parent_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new CreateModelDeploymentMonitoringJobRequest();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.aiplatform.v1.JobServiceProto
        .internal_static_google_cloud_aiplatform_v1_CreateModelDeploymentMonitoringJobRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.aiplatform.v1.JobServiceProto
        .internal_static_google_cloud_aiplatform_v1_CreateModelDeploymentMonitoringJobRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest.class,
            com.google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest.Builder.class);
  }
  // Bit 0 records whether model_deployment_monitoring_job was explicitly set
  // (see hasModelDeploymentMonitoringJob()).
  private int bitField0_;
  public static final int PARENT_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";
  /**
   *
   *
   * <pre>
   * Required. The parent of the ModelDeploymentMonitoringJob.
   * Format: `projects/{project}/locations/{location}`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    // parent_ holds either a String or a ByteString; decode lazily and memoize
    // the decoded String form.
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The parent of the ModelDeploymentMonitoringJob.
   * Format: `projects/{project}/locations/{location}`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    // Mirror of getParent(): memoizes the UTF-8 ByteString form.
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int MODEL_DEPLOYMENT_MONITORING_JOB_FIELD_NUMBER = 2;
  private com.google.cloud.aiplatform.v1.ModelDeploymentMonitoringJob modelDeploymentMonitoringJob_;
  /**
   *
   *
   * <pre>
   * Required. The ModelDeploymentMonitoringJob to create
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1.ModelDeploymentMonitoringJob model_deployment_monitoring_job = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the modelDeploymentMonitoringJob field is set.
   */
  @java.lang.Override
  public boolean hasModelDeploymentMonitoringJob() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Required. The ModelDeploymentMonitoringJob to create
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1.ModelDeploymentMonitoringJob model_deployment_monitoring_job = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The modelDeploymentMonitoringJob.
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.ModelDeploymentMonitoringJob
      getModelDeploymentMonitoringJob() {
    // Never returns null: falls back to the default instance when unset.
    return modelDeploymentMonitoringJob_ == null
        ? com.google.cloud.aiplatform.v1.ModelDeploymentMonitoringJob.getDefaultInstance()
        : modelDeploymentMonitoringJob_;
  }
  /**
   *
   *
   * <pre>
   * Required. The ModelDeploymentMonitoringJob to create
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1.ModelDeploymentMonitoringJob model_deployment_monitoring_job = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.ModelDeploymentMonitoringJobOrBuilder
      getModelDeploymentMonitoringJobOrBuilder() {
    return modelDeploymentMonitoringJob_ == null
        ? com.google.cloud.aiplatform.v1.ModelDeploymentMonitoringJob.getDefaultInstance()
        : modelDeploymentMonitoringJob_;
  }
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    // Memoized: -1 = unknown, 0 = not initialized, 1 = initialized.
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Fields are written in ascending field-number order: parent (1), job (2);
    // unset fields are omitted from the wire.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(2, getModelDeploymentMonitoringJob());
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Size is memoized in memoizedSize (-1 means "not yet computed").
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              2, getModelDeploymentMonitoringJob());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj
        instanceof com.google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest other =
        (com.google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest) obj;
    if (!getParent().equals(other.getParent())) return false;
    if (hasModelDeploymentMonitoringJob() != other.hasModelDeploymentMonitoringJob()) return false;
    if (hasModelDeploymentMonitoringJob()) {
      if (!getModelDeploymentMonitoringJob().equals(other.getModelDeploymentMonitoringJob()))
        return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Memoized; folds in each set field tagged by its field number, consistent
    // with equals() above.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    if (hasModelDeploymentMonitoringJob()) {
      hash = (37 * hash) + MODEL_DEPLOYMENT_MONITORING_JOB_FIELD_NUMBER;
      hash = (53 * hash) + getModelDeploymentMonitoringJob().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parseFrom overloads for every supported input type.
  public static com.google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Request message for
   * [JobService.CreateModelDeploymentMonitoringJob][google.cloud.aiplatform.v1.JobService.CreateModelDeploymentMonitoringJob].
   * </pre>
   *
   * Protobuf type {@code google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest)
      com.google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.aiplatform.v1.JobServiceProto
          .internal_static_google_cloud_aiplatform_v1_CreateModelDeploymentMonitoringJobRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.aiplatform.v1.JobServiceProto
          .internal_static_google_cloud_aiplatform_v1_CreateModelDeploymentMonitoringJobRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest.class,
              com.google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest.Builder
                  .class);
    }
    // Construct using
    // com.google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      // Eagerly create the nested-field builder only when the runtime requires it.
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getModelDeploymentMonitoringJobFieldBuilder();
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      modelDeploymentMonitoringJob_ = null;
      if (modelDeploymentMonitoringJobBuilder_ != null) {
        modelDeploymentMonitoringJobBuilder_.dispose();
        modelDeploymentMonitoringJobBuilder_ = null;
      }
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.aiplatform.v1.JobServiceProto
          .internal_static_google_cloud_aiplatform_v1_CreateModelDeploymentMonitoringJobRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest
        getDefaultInstanceForType() {
      return com.google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest
          .getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest build() {
      com.google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest result =
          buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest buildPartial() {
      com.google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest result =
          new com.google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    private void buildPartial0(
        com.google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest result) {
      // Copies set fields into the message; note the builder's bit 0x2 (job field)
      // maps to the message's bit 0x1.
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.modelDeploymentMonitoringJob_ =
            modelDeploymentMonitoringJobBuilder_ == null
                ? modelDeploymentMonitoringJob_
                : modelDeploymentMonitoringJobBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other
          instanceof com.google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest) {
        return mergeFrom(
            (com.google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(
        com.google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest other) {
      // Merging the default instance is a no-op; otherwise copy each set field.
      if (other
          == com.google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest
              .getDefaultInstance()) return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.hasModelDeploymentMonitoringJob()) {
        mergeModelDeploymentMonitoringJob(other.getModelDeploymentMonitoringJob());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Tag 10 = field 1 (parent, length-delimited); tag 18 = field 2 (job).
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(
                    getModelDeploymentMonitoringJobFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;
    private java.lang.Object parent_ = "";
    /**
     *
     *
     * <pre>
     * Required. The parent of the ModelDeploymentMonitoringJob.
     * Format: `projects/{project}/locations/{location}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The parent of the ModelDeploymentMonitoringJob.
     * Format: `projects/{project}/locations/{location}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The parent of the ModelDeploymentMonitoringJob.
     * Format: `projects/{project}/locations/{location}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The parent of the ModelDeploymentMonitoringJob.
     * Format: `projects/{project}/locations/{location}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The parent of the ModelDeploymentMonitoringJob.
     * Format: `projects/{project}/locations/{location}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private com.google.cloud.aiplatform.v1.ModelDeploymentMonitoringJob
        modelDeploymentMonitoringJob_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1.ModelDeploymentMonitoringJob,
            com.google.cloud.aiplatform.v1.ModelDeploymentMonitoringJob.Builder,
            com.google.cloud.aiplatform.v1.ModelDeploymentMonitoringJobOrBuilder>
        modelDeploymentMonitoringJobBuilder_;
    /**
     *
     *
     * <pre>
     * Required. The ModelDeploymentMonitoringJob to create
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.ModelDeploymentMonitoringJob model_deployment_monitoring_job = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the modelDeploymentMonitoringJob field is set.
     */
    public boolean hasModelDeploymentMonitoringJob() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     *
     *
     * <pre>
     * Required. The ModelDeploymentMonitoringJob to create
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.ModelDeploymentMonitoringJob model_deployment_monitoring_job = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The modelDeploymentMonitoringJob.
     */
    public com.google.cloud.aiplatform.v1.ModelDeploymentMonitoringJob
        getModelDeploymentMonitoringJob() {
      if (modelDeploymentMonitoringJobBuilder_ == null) {
        return modelDeploymentMonitoringJob_ == null
            ? com.google.cloud.aiplatform.v1.ModelDeploymentMonitoringJob.getDefaultInstance()
            : modelDeploymentMonitoringJob_;
      } else {
        return modelDeploymentMonitoringJobBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The ModelDeploymentMonitoringJob to create
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.ModelDeploymentMonitoringJob model_deployment_monitoring_job = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setModelDeploymentMonitoringJob(
        com.google.cloud.aiplatform.v1.ModelDeploymentMonitoringJob value) {
      if (modelDeploymentMonitoringJobBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        modelDeploymentMonitoringJob_ = value;
      } else {
        modelDeploymentMonitoringJobBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The ModelDeploymentMonitoringJob to create
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.ModelDeploymentMonitoringJob model_deployment_monitoring_job = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setModelDeploymentMonitoringJob(
        com.google.cloud.aiplatform.v1.ModelDeploymentMonitoringJob.Builder builderForValue) {
      if (modelDeploymentMonitoringJobBuilder_ == null) {
        modelDeploymentMonitoringJob_ = builderForValue.build();
      } else {
        modelDeploymentMonitoringJobBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The ModelDeploymentMonitoringJob to create
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.ModelDeploymentMonitoringJob model_deployment_monitoring_job = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeModelDeploymentMonitoringJob(
        com.google.cloud.aiplatform.v1.ModelDeploymentMonitoringJob value) {
      // If a non-default value is already set, merge into it; otherwise replace.
      if (modelDeploymentMonitoringJobBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && modelDeploymentMonitoringJob_ != null
            && modelDeploymentMonitoringJob_
                != com.google.cloud.aiplatform.v1.ModelDeploymentMonitoringJob
                    .getDefaultInstance()) {
          getModelDeploymentMonitoringJobBuilder().mergeFrom(value);
        } else {
          modelDeploymentMonitoringJob_ = value;
        }
      } else {
        modelDeploymentMonitoringJobBuilder_.mergeFrom(value);
      }
      if (modelDeploymentMonitoringJob_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The ModelDeploymentMonitoringJob to create
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.ModelDeploymentMonitoringJob model_deployment_monitoring_job = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearModelDeploymentMonitoringJob() {
      bitField0_ = (bitField0_ & ~0x00000002);
      modelDeploymentMonitoringJob_ = null;
      if (modelDeploymentMonitoringJobBuilder_ != null) {
        modelDeploymentMonitoringJobBuilder_.dispose();
        modelDeploymentMonitoringJobBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The ModelDeploymentMonitoringJob to create
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.ModelDeploymentMonitoringJob model_deployment_monitoring_job = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.aiplatform.v1.ModelDeploymentMonitoringJob.Builder
        getModelDeploymentMonitoringJobBuilder() {
      // Marks the field as set: obtaining the builder implies intent to populate it.
      bitField0_ |= 0x00000002;
      onChanged();
      return getModelDeploymentMonitoringJobFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. The ModelDeploymentMonitoringJob to create
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.ModelDeploymentMonitoringJob model_deployment_monitoring_job = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.aiplatform.v1.ModelDeploymentMonitoringJobOrBuilder
        getModelDeploymentMonitoringJobOrBuilder() {
      if (modelDeploymentMonitoringJobBuilder_ != null) {
        return modelDeploymentMonitoringJobBuilder_.getMessageOrBuilder();
      } else {
        return modelDeploymentMonitoringJob_ == null
            ? com.google.cloud.aiplatform.v1.ModelDeploymentMonitoringJob.getDefaultInstance()
            : modelDeploymentMonitoringJob_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The ModelDeploymentMonitoringJob to create
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.ModelDeploymentMonitoringJob model_deployment_monitoring_job = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1.ModelDeploymentMonitoringJob,
            com.google.cloud.aiplatform.v1.ModelDeploymentMonitoringJob.Builder,
            com.google.cloud.aiplatform.v1.ModelDeploymentMonitoringJobOrBuilder>
        getModelDeploymentMonitoringJobFieldBuilder() {
      // Lazily created; once the builder exists it owns the value, so the plain
      // field reference is cleared.
      if (modelDeploymentMonitoringJobBuilder_ == null) {
        modelDeploymentMonitoringJobBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.aiplatform.v1.ModelDeploymentMonitoringJob,
                com.google.cloud.aiplatform.v1.ModelDeploymentMonitoringJob.Builder,
                com.google.cloud.aiplatform.v1.ModelDeploymentMonitoringJobOrBuilder>(
                getModelDeploymentMonitoringJob(), getParentForChildren(), isClean());
        modelDeploymentMonitoringJob_ = null;
      }
      return modelDeploymentMonitoringJobBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest)
  // Shared immutable singleton returned by getDefaultInstance()/getDefaultInstanceForType().
  private static final com.google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE =
        new com.google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest();
  }
  public static com.google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser; parse failures carry the partially built message.
  private static final com.google.protobuf.Parser<CreateModelDeploymentMonitoringJobRequest>
      PARSER =
          new com.google.protobuf.AbstractParser<CreateModelDeploymentMonitoringJobRequest>() {
            @java.lang.Override
            public CreateModelDeploymentMonitoringJobRequest parsePartialFrom(
                com.google.protobuf.CodedInputStream input,
                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
                throws com.google.protobuf.InvalidProtocolBufferException {
              Builder builder = newBuilder();
              try {
                builder.mergeFrom(input, extensionRegistry);
              } catch (com.google.protobuf.InvalidProtocolBufferException e) {
                throw e.setUnfinishedMessage(builder.buildPartial());
              } catch (com.google.protobuf.UninitializedMessageException e) {
                throw e.asInvalidProtocolBufferException()
                    .setUnfinishedMessage(builder.buildPartial());
              } catch (java.io.IOException e) {
                throw new com.google.protobuf.InvalidProtocolBufferException(e)
                    .setUnfinishedMessage(builder.buildPartial());
              }
              return builder.buildPartial();
            }
          };
  public static com.google.protobuf.Parser<CreateModelDeploymentMonitoringJobRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<CreateModelDeploymentMonitoringJobRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.CreateModelDeploymentMonitoringJobRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/incubator-xtable | 35,098 | xtable-core/src/test/java/org/apache/xtable/schema/TestSparkSchemaExtractor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.xtable.schema;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructType;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.apache.xtable.model.schema.InternalField;
import org.apache.xtable.model.schema.InternalSchema;
import org.apache.xtable.model.schema.InternalType;
/**
 * Unit tests for {@link SparkSchemaExtractor#fromInternalSchema}, verifying that internal-model
 * schemas ({@link InternalSchema}) are converted to the expected Spark {@link StructType}
 * representations for primitive, fixed, temporal, enum, map, list, and nested-record types.
 */
public class TestSparkSchemaExtractor {

  /**
   * Builds a leaf (non-nested) schema with no extra metadata.
   *
   * @param name logical type name recorded on the schema (e.g. "integer", "string")
   * @param type internal data type
   * @param nullable whether the schema permits null values
   */
  private static InternalSchema leafSchema(String name, InternalType type, boolean nullable) {
    return InternalSchema.builder().name(name).dataType(type).isNullable(nullable).build();
  }

  /**
   * Builds a leaf schema that carries extra metadata (decimal precision/scale, timestamp
   * precision, or enum values).
   */
  private static InternalSchema leafSchema(
      String name,
      InternalType type,
      boolean nullable,
      Map<InternalSchema.MetadataKey, Object> metadata) {
    return InternalSchema.builder()
        .name(name)
        .dataType(type)
        .isNullable(nullable)
        .metadata(metadata)
        .build();
  }

  /**
   * Builds a required field (no default value).
   *
   * @param parentPath dotted path of the enclosing field, or null for top-level fields
   */
  private static InternalField requiredField(
      String name, String parentPath, InternalSchema schema) {
    return InternalField.builder().name(name).parentPath(parentPath).schema(schema).build();
  }

  /** Builds an optional field whose default value is the conventional null sentinel. */
  private static InternalField optionalField(
      String name, String parentPath, InternalSchema schema) {
    return InternalField.builder()
        .name(name)
        .parentPath(parentPath)
        .schema(schema)
        .defaultValue(InternalField.Constants.NULL_DEFAULT_VALUE)
        .build();
  }

  /** Each primitive internal type maps to the corresponding Spark type with nullability kept. */
  @Test
  public void testPrimitiveTypes() {
    // DECIMAL conversion needs precision/scale metadata to produce DecimalType(10, 2).
    Map<InternalSchema.MetadataKey, Object> decimalMetadata = new HashMap<>();
    decimalMetadata.put(InternalSchema.MetadataKey.DECIMAL_PRECISION, 10);
    decimalMetadata.put(InternalSchema.MetadataKey.DECIMAL_SCALE, 2);
    InternalSchema internalSchema =
        InternalSchema.builder()
            .name("struct")
            .dataType(InternalType.RECORD)
            .isNullable(false)
            .fields(
                Arrays.asList(
                    requiredField(
                        "requiredBoolean",
                        null,
                        leafSchema("boolean", InternalType.BOOLEAN, false)),
                    optionalField(
                        "optionalBoolean", null, leafSchema("boolean", InternalType.BOOLEAN, true)),
                    requiredField(
                        "requiredInt", null, leafSchema("integer", InternalType.INT, false)),
                    optionalField(
                        "optionalInt", null, leafSchema("integer", InternalType.INT, true)),
                    requiredField(
                        "requiredLong", null, leafSchema("long", InternalType.LONG, false)),
                    optionalField("optionalLong", null, leafSchema("long", InternalType.LONG, true)),
                    requiredField(
                        "requiredDouble", null, leafSchema("double", InternalType.DOUBLE, false)),
                    optionalField(
                        "optionalDouble", null, leafSchema("double", InternalType.DOUBLE, true)),
                    requiredField(
                        "requiredFloat", null, leafSchema("float", InternalType.FLOAT, false)),
                    optionalField(
                        "optionalFloat", null, leafSchema("float", InternalType.FLOAT, true)),
                    requiredField(
                        "requiredString", null, leafSchema("string", InternalType.STRING, false)),
                    optionalField(
                        "optionalString", null, leafSchema("string", InternalType.STRING, true)),
                    requiredField(
                        "requiredBytes", null, leafSchema("binary", InternalType.BYTES, false)),
                    optionalField(
                        "optionalBytes", null, leafSchema("binary", InternalType.BYTES, true)),
                    requiredField(
                        "requiredDate", null, leafSchema("date", InternalType.DATE, false)),
                    optionalField("optionalDate", null, leafSchema("date", InternalType.DATE, true)),
                    requiredField(
                        "requiredDecimal",
                        null,
                        leafSchema("decimal", InternalType.DECIMAL, false, decimalMetadata)),
                    optionalField(
                        "optionalDecimal",
                        null,
                        leafSchema("decimal", InternalType.DECIMAL, true, decimalMetadata))))
            .build();
    StructType structRepresentation =
        new StructType()
            .add("requiredBoolean", DataTypes.BooleanType, false)
            .add("optionalBoolean", DataTypes.BooleanType, true)
            .add("requiredInt", DataTypes.IntegerType, false)
            .add("optionalInt", DataTypes.IntegerType, true)
            .add("requiredLong", DataTypes.LongType, false)
            .add("optionalLong", DataTypes.LongType, true)
            .add("requiredDouble", DataTypes.DoubleType, false)
            .add("optionalDouble", DataTypes.DoubleType, true)
            .add("requiredFloat", DataTypes.FloatType, false)
            .add("optionalFloat", DataTypes.FloatType, true)
            .add("requiredString", DataTypes.StringType, false)
            .add("optionalString", DataTypes.StringType, true)
            .add("requiredBytes", DataTypes.BinaryType, false)
            .add("optionalBytes", DataTypes.BinaryType, true)
            .add("requiredDate", DataTypes.DateType, false)
            .add("optionalDate", DataTypes.DateType, true)
            .add("requiredDecimal", DataTypes.createDecimalType(10, 2), false)
            .add("optionalDecimal", DataTypes.createDecimalType(10, 2), true);
    Assertions.assertEquals(
        structRepresentation,
        SparkSchemaExtractor.getInstance().fromInternalSchema(internalSchema));
  }

  /** FIXED has no direct Spark equivalent; the extractor maps it to BinaryType. */
  @Test
  public void testFixedBytes() {
    InternalSchema fixedSchema =
        InternalSchema.builder()
            .name("struct")
            .dataType(InternalType.RECORD)
            .isNullable(false)
            .fields(
                Arrays.asList(
                    requiredField(
                        "requiredFixed", null, leafSchema("fixed", InternalType.FIXED, false)),
                    optionalField(
                        "optionalFixed", null, leafSchema("fixed", InternalType.FIXED, true))))
            .build();
    StructType structRepresentation =
        new StructType()
            .add("requiredFixed", DataTypes.BinaryType, false)
            .add("optionalFixed", DataTypes.BinaryType, true);
    Assertions.assertEquals(
        structRepresentation, SparkSchemaExtractor.getInstance().fromInternalSchema(fixedSchema));
  }

  /** TIMESTAMP maps to TimestampType; TIMESTAMP_NTZ maps to TimestampNTZType. */
  @Test
  public void testTimestamps() {
    Map<InternalSchema.MetadataKey, Object> metadata =
        Collections.singletonMap(
            InternalSchema.MetadataKey.TIMESTAMP_PRECISION, InternalSchema.MetadataValue.MICROS);
    InternalSchema timestampSchema =
        InternalSchema.builder()
            .name("struct")
            .dataType(InternalType.RECORD)
            .isNullable(false)
            .fields(
                Arrays.asList(
                    requiredField(
                        "requiredTimestamp",
                        null,
                        leafSchema("timestamp", InternalType.TIMESTAMP, false, metadata)),
                    optionalField(
                        "optionalTimestamp",
                        null,
                        leafSchema("timestamp", InternalType.TIMESTAMP, true, metadata))))
            .build();
    InternalSchema timestampNtzSchema =
        InternalSchema.builder()
            .name("struct")
            .dataType(InternalType.RECORD)
            .isNullable(false)
            .fields(
                Arrays.asList(
                    requiredField(
                        "requiredTimestampNtz",
                        null,
                        leafSchema("timestampNtz", InternalType.TIMESTAMP_NTZ, false)),
                    optionalField(
                        "optionalTimestampNtz",
                        null,
                        leafSchema("timestampNtz", InternalType.TIMESTAMP_NTZ, true))))
            .build();
    StructType structRepresentationTimestamp =
        new StructType()
            .add("requiredTimestamp", DataTypes.TimestampType, false)
            .add("optionalTimestamp", DataTypes.TimestampType, true);
    StructType structRepresentationTimestampNtz =
        new StructType()
            .add("requiredTimestampNtz", DataTypes.TimestampNTZType, false)
            .add("optionalTimestampNtz", DataTypes.TimestampNTZType, true);
    Assertions.assertEquals(
        structRepresentationTimestamp,
        SparkSchemaExtractor.getInstance().fromInternalSchema(timestampSchema));
    Assertions.assertEquals(
        structRepresentationTimestampNtz,
        SparkSchemaExtractor.getInstance().fromInternalSchema(timestampNtzSchema));
  }

  /** Enums are represented as plain strings on the Spark side. */
  @Test
  public void testEnums() {
    Map<InternalSchema.MetadataKey, Object> requiredEnumMetadata = new HashMap<>();
    requiredEnumMetadata.put(InternalSchema.MetadataKey.ENUM_VALUES, Arrays.asList("ONE", "TWO"));
    Map<InternalSchema.MetadataKey, Object> optionalEnumMetadata = new HashMap<>();
    optionalEnumMetadata.put(
        InternalSchema.MetadataKey.ENUM_VALUES, Arrays.asList("THREE", "FOUR"));
    InternalSchema internalSchema =
        InternalSchema.builder()
            .name("struct")
            .dataType(InternalType.RECORD)
            .isNullable(false)
            .fields(
                Arrays.asList(
                    requiredField(
                        "requiredEnum",
                        null,
                        leafSchema("REQUIRED_ENUM", InternalType.ENUM, false, requiredEnumMetadata)),
                    optionalField(
                        "optionalEnum",
                        null,
                        leafSchema(
                            "OPTIONAL_ENUM", InternalType.ENUM, true, optionalEnumMetadata))))
            .build();
    StructType structRepresentation =
        new StructType()
            .add("requiredEnum", DataTypes.StringType, false)
            .add("optionalEnum", DataTypes.StringType, true);
    Assertions.assertEquals(
        structRepresentation,
        SparkSchemaExtractor.getInstance().fromInternalSchema(internalSchema));
  }

  /** Maps convert to Spark MapType with key/value types and value-nullability carried over. */
  @Test
  public void testMaps() {
    // Value type of recordMap: a nullable struct with one required and one optional field.
    InternalSchema recordMapElementSchema =
        InternalSchema.builder()
            .name("struct")
            .isNullable(true)
            .fields(
                Arrays.asList(
                    requiredField(
                        "requiredDouble",
                        "recordMap._one_field_value",
                        leafSchema("double", InternalType.DOUBLE, false)),
                    optionalField(
                        "optionalString",
                        "recordMap._one_field_value",
                        leafSchema("string", InternalType.STRING, true))))
            .dataType(InternalType.RECORD)
            .build();
    InternalSchema intMapSchema =
        InternalSchema.builder()
            .name("map")
            .isNullable(false)
            .dataType(InternalType.MAP)
            .fields(
                Arrays.asList(
                    requiredField(
                        InternalField.Constants.MAP_KEY_FIELD_NAME,
                        "intMap",
                        leafSchema("string", InternalType.STRING, false)),
                    requiredField(
                        InternalField.Constants.MAP_VALUE_FIELD_NAME,
                        "intMap",
                        leafSchema("integer", InternalType.INT, false))))
            .build();
    InternalSchema recordMapSchema =
        InternalSchema.builder()
            .name("map")
            .isNullable(true)
            .dataType(InternalType.MAP)
            .fields(
                Arrays.asList(
                    requiredField(
                        InternalField.Constants.MAP_KEY_FIELD_NAME,
                        "recordMap",
                        leafSchema("integer", InternalType.INT, false)),
                    requiredField(
                        InternalField.Constants.MAP_VALUE_FIELD_NAME,
                        "recordMap",
                        recordMapElementSchema)))
            .build();
    InternalSchema internalSchema =
        InternalSchema.builder()
            .name("struct")
            .dataType(InternalType.RECORD)
            .isNullable(false)
            .fields(
                Arrays.asList(
                    requiredField("intMap", null, intMapSchema),
                    optionalField("recordMap", null, recordMapSchema)))
            .build();
    StructType mapElement =
        new StructType()
            .add("requiredDouble", DataTypes.DoubleType, false)
            .add("optionalString", DataTypes.StringType, true);
    StructType structRepresentation =
        new StructType()
            .add(
                "intMap",
                DataTypes.createMapType(DataTypes.StringType, DataTypes.IntegerType, false),
                false)
            // Two-argument add() defaults the top-level field's nullability to true.
            .add("recordMap", DataTypes.createMapType(DataTypes.IntegerType, mapElement, true));
    Assertions.assertEquals(
        structRepresentation,
        SparkSchemaExtractor.getInstance().fromInternalSchema(internalSchema));
  }

  /** Lists convert to Spark ArrayType with element type and element-nullability carried over. */
  @Test
  public void testLists() {
    InternalSchema recordListElementSchema =
        InternalSchema.builder()
            .name("struct")
            .isNullable(true)
            .fields(
                Arrays.asList(
                    requiredField(
                        "requiredDouble",
                        "recordList._one_field_element",
                        leafSchema("double", InternalType.DOUBLE, false)),
                    optionalField(
                        "optionalString",
                        "recordList._one_field_element",
                        leafSchema("string", InternalType.STRING, true))))
            .dataType(InternalType.RECORD)
            .build();
    InternalSchema intListSchema =
        InternalSchema.builder()
            .name("array")
            .isNullable(false)
            .dataType(InternalType.LIST)
            .fields(
                Collections.singletonList(
                    requiredField(
                        InternalField.Constants.ARRAY_ELEMENT_FIELD_NAME,
                        "intList",
                        leafSchema("integer", InternalType.INT, false))))
            .build();
    InternalSchema recordListSchema =
        InternalSchema.builder()
            .name("array")
            .isNullable(true)
            .dataType(InternalType.LIST)
            .fields(
                Collections.singletonList(
                    requiredField(
                        InternalField.Constants.ARRAY_ELEMENT_FIELD_NAME,
                        "recordList",
                        recordListElementSchema)))
            .build();
    InternalSchema internalSchema =
        InternalSchema.builder()
            .name("struct")
            .dataType(InternalType.RECORD)
            .isNullable(false)
            .fields(
                Arrays.asList(
                    requiredField("intList", null, intListSchema),
                    optionalField("recordList", null, recordListSchema)))
            .build();
    StructType elementSchema =
        new StructType()
            .add("requiredDouble", DataTypes.DoubleType, false)
            .add("optionalString", DataTypes.StringType, true);
    StructType structRepresentation =
        new StructType()
            .add("intList", DataTypes.createArrayType(DataTypes.IntegerType, false), false)
            .add("recordList", DataTypes.createArrayType(elementSchema, true), true);
    Assertions.assertEquals(
        structRepresentation,
        SparkSchemaExtractor.getInstance().fromInternalSchema(internalSchema));
  }

  /** Records nest arbitrarily; each level becomes a nested StructType. */
  @Test
  public void testNestedRecords() {
    InternalSchema nestedTwoSchema =
        InternalSchema.builder()
            .name("struct")
            .dataType(InternalType.RECORD)
            .isNullable(false)
            .fields(
                Arrays.asList(
                    optionalField(
                        "doublyNestedString",
                        "nestedOne.nestedTwo",
                        leafSchema("string", InternalType.STRING, true))))
            .build();
    InternalSchema nestedOneSchema =
        InternalSchema.builder()
            .name("struct")
            .dataType(InternalType.RECORD)
            .isNullable(true)
            .fields(
                Arrays.asList(
                    optionalField(
                        "nestedOptionalInt",
                        "nestedOne",
                        leafSchema("integer", InternalType.INT, true)),
                    requiredField(
                        "nestedRequiredDouble",
                        "nestedOne",
                        leafSchema("double", InternalType.DOUBLE, false)),
                    requiredField("nestedTwo", "nestedOne", nestedTwoSchema)))
            .build();
    InternalSchema internalSchema =
        InternalSchema.builder()
            .name("struct")
            .dataType(InternalType.RECORD)
            .isNullable(false)
            .fields(
                Arrays.asList(optionalField("nestedOne", null, nestedOneSchema)))
            .build();
    StructType structRepresentation =
        new StructType()
            .add(
                "nestedOne",
                new StructType()
                    .add("nestedOptionalInt", DataTypes.IntegerType, true)
                    .add("nestedRequiredDouble", DataTypes.DoubleType, false)
                    .add(
                        "nestedTwo",
                        new StructType().add("doublyNestedString", DataTypes.StringType, true),
                        false),
                true);
    Assertions.assertEquals(
        structRepresentation,
        SparkSchemaExtractor.getInstance().fromInternalSchema(internalSchema));
  }
}
|
googleapis/google-cloud-java | 35,228 | java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/FirewallOrBuilder.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.compute.v1;
public interface FirewallOrBuilder
extends
// @@protoc_insertion_point(interface_extends:google.cloud.compute.v1.Firewall)
com.google.protobuf.MessageOrBuilder {
/**
*
*
* <pre>
* The list of ALLOW rules specified by this firewall. Each rule specifies a protocol and port-range tuple that describes a permitted connection.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.Allowed allowed = 162398632;</code>
*/
java.util.List<com.google.cloud.compute.v1.Allowed> getAllowedList();
/**
*
*
* <pre>
* The list of ALLOW rules specified by this firewall. Each rule specifies a protocol and port-range tuple that describes a permitted connection.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.Allowed allowed = 162398632;</code>
*/
com.google.cloud.compute.v1.Allowed getAllowed(int index);
/**
*
*
* <pre>
* The list of ALLOW rules specified by this firewall. Each rule specifies a protocol and port-range tuple that describes a permitted connection.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.Allowed allowed = 162398632;</code>
*/
int getAllowedCount();
/**
*
*
* <pre>
* The list of ALLOW rules specified by this firewall. Each rule specifies a protocol and port-range tuple that describes a permitted connection.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.Allowed allowed = 162398632;</code>
*/
java.util.List<? extends com.google.cloud.compute.v1.AllowedOrBuilder> getAllowedOrBuilderList();
/**
*
*
* <pre>
* The list of ALLOW rules specified by this firewall. Each rule specifies a protocol and port-range tuple that describes a permitted connection.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.Allowed allowed = 162398632;</code>
*/
com.google.cloud.compute.v1.AllowedOrBuilder getAllowedOrBuilder(int index);
/**
*
*
* <pre>
* [Output Only] Creation timestamp in RFC3339 text format.
* </pre>
*
* <code>optional string creation_timestamp = 30525366;</code>
*
* @return Whether the creationTimestamp field is set.
*/
boolean hasCreationTimestamp();
/**
*
*
* <pre>
* [Output Only] Creation timestamp in RFC3339 text format.
* </pre>
*
* <code>optional string creation_timestamp = 30525366;</code>
*
* @return The creationTimestamp.
*/
java.lang.String getCreationTimestamp();
/**
*
*
* <pre>
* [Output Only] Creation timestamp in RFC3339 text format.
* </pre>
*
* <code>optional string creation_timestamp = 30525366;</code>
*
* @return The bytes for creationTimestamp.
*/
com.google.protobuf.ByteString getCreationTimestampBytes();
/**
*
*
* <pre>
* The list of DENY rules specified by this firewall. Each rule specifies a protocol and port-range tuple that describes a denied connection.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.Denied denied = 275217307;</code>
*/
java.util.List<com.google.cloud.compute.v1.Denied> getDeniedList();
/**
*
*
* <pre>
* The list of DENY rules specified by this firewall. Each rule specifies a protocol and port-range tuple that describes a denied connection.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.Denied denied = 275217307;</code>
*/
com.google.cloud.compute.v1.Denied getDenied(int index);
/**
*
*
* <pre>
* The list of DENY rules specified by this firewall. Each rule specifies a protocol and port-range tuple that describes a denied connection.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.Denied denied = 275217307;</code>
*/
int getDeniedCount();
/**
*
*
* <pre>
* The list of DENY rules specified by this firewall. Each rule specifies a protocol and port-range tuple that describes a denied connection.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.Denied denied = 275217307;</code>
*/
java.util.List<? extends com.google.cloud.compute.v1.DeniedOrBuilder> getDeniedOrBuilderList();
/**
*
*
* <pre>
* The list of DENY rules specified by this firewall. Each rule specifies a protocol and port-range tuple that describes a denied connection.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.Denied denied = 275217307;</code>
*/
com.google.cloud.compute.v1.DeniedOrBuilder getDeniedOrBuilder(int index);
/**
*
*
* <pre>
* An optional description of this resource. Provide this field when you create the resource.
* </pre>
*
* <code>optional string description = 422937596;</code>
*
* @return Whether the description field is set.
*/
boolean hasDescription();
/**
*
*
* <pre>
* An optional description of this resource. Provide this field when you create the resource.
* </pre>
*
* <code>optional string description = 422937596;</code>
*
* @return The description.
*/
java.lang.String getDescription();
/**
*
*
* <pre>
* An optional description of this resource. Provide this field when you create the resource.
* </pre>
*
* <code>optional string description = 422937596;</code>
*
* @return The bytes for description.
*/
com.google.protobuf.ByteString getDescriptionBytes();
/**
*
*
* <pre>
* If destination ranges are specified, the firewall rule applies only to traffic that has destination IP address in these ranges. These ranges must be expressed in CIDR format. Both IPv4 and IPv6 are supported.
* </pre>
*
* <code>repeated string destination_ranges = 305699879;</code>
*
* @return A list containing the destinationRanges.
*/
java.util.List<java.lang.String> getDestinationRangesList();
/**
*
*
* <pre>
* If destination ranges are specified, the firewall rule applies only to traffic that has destination IP address in these ranges. These ranges must be expressed in CIDR format. Both IPv4 and IPv6 are supported.
* </pre>
*
* <code>repeated string destination_ranges = 305699879;</code>
*
* @return The count of destinationRanges.
*/
int getDestinationRangesCount();
/**
*
*
* <pre>
* If destination ranges are specified, the firewall rule applies only to traffic that has destination IP address in these ranges. These ranges must be expressed in CIDR format. Both IPv4 and IPv6 are supported.
* </pre>
*
* <code>repeated string destination_ranges = 305699879;</code>
*
* @param index The index of the element to return.
* @return The destinationRanges at the given index.
*/
java.lang.String getDestinationRanges(int index);
/**
*
*
* <pre>
* If destination ranges are specified, the firewall rule applies only to traffic that has destination IP address in these ranges. These ranges must be expressed in CIDR format. Both IPv4 and IPv6 are supported.
* </pre>
*
* <code>repeated string destination_ranges = 305699879;</code>
*
* @param index The index of the value to return.
* @return The bytes of the destinationRanges at the given index.
*/
com.google.protobuf.ByteString getDestinationRangesBytes(int index);
/**
*
*
* <pre>
* Direction of traffic to which this firewall applies, either `INGRESS` or `EGRESS`. The default is `INGRESS`. For `EGRESS` traffic, you cannot specify the sourceTags fields.
* Check the Direction enum for the list of possible values.
* </pre>
*
* <code>optional string direction = 111150975;</code>
*
* @return Whether the direction field is set.
*/
boolean hasDirection();
/**
*
*
* <pre>
* Direction of traffic to which this firewall applies, either `INGRESS` or `EGRESS`. The default is `INGRESS`. For `EGRESS` traffic, you cannot specify the sourceTags fields.
* Check the Direction enum for the list of possible values.
* </pre>
*
* <code>optional string direction = 111150975;</code>
*
* @return The direction.
*/
java.lang.String getDirection();
/**
*
*
* <pre>
* Direction of traffic to which this firewall applies, either `INGRESS` or `EGRESS`. The default is `INGRESS`. For `EGRESS` traffic, you cannot specify the sourceTags fields.
* Check the Direction enum for the list of possible values.
* </pre>
*
* <code>optional string direction = 111150975;</code>
*
* @return The bytes for direction.
*/
com.google.protobuf.ByteString getDirectionBytes();
/**
*
*
* <pre>
* Denotes whether the firewall rule is disabled. When set to true, the firewall rule is not enforced and the network behaves as if it did not exist. If this is unspecified, the firewall rule will be enabled.
* </pre>
*
* <code>optional bool disabled = 270940796;</code>
*
* @return Whether the disabled field is set.
*/
boolean hasDisabled();
/**
*
*
* <pre>
* Denotes whether the firewall rule is disabled. When set to true, the firewall rule is not enforced and the network behaves as if it did not exist. If this is unspecified, the firewall rule will be enabled.
* </pre>
*
* <code>optional bool disabled = 270940796;</code>
*
* @return The disabled.
*/
boolean getDisabled();
/**
*
*
* <pre>
* [Output Only] The unique identifier for the resource. This identifier is defined by the server.
* </pre>
*
* <code>optional uint64 id = 3355;</code>
*
* @return Whether the id field is set.
*/
boolean hasId();
/**
*
*
* <pre>
* [Output Only] The unique identifier for the resource. This identifier is defined by the server.
* </pre>
*
* <code>optional uint64 id = 3355;</code>
*
* @return The id.
*/
long getId();
/**
*
*
* <pre>
* [Output Only] Type of the resource. Always compute#firewall for firewall rules.
* </pre>
*
* <code>optional string kind = 3292052;</code>
*
* @return Whether the kind field is set.
*/
boolean hasKind();
/**
*
*
* <pre>
* [Output Only] Type of the resource. Always compute#firewall for firewall rules.
* </pre>
*
* <code>optional string kind = 3292052;</code>
*
* @return The kind.
*/
java.lang.String getKind();
/**
*
*
* <pre>
* [Output Only] Type of the resource. Always compute#firewall for firewall rules.
* </pre>
*
* <code>optional string kind = 3292052;</code>
*
* @return The bytes for kind.
*/
com.google.protobuf.ByteString getKindBytes();
/**
*
*
* <pre>
* This field denotes the logging options for a particular firewall rule. If logging is enabled, logs will be exported to Cloud Logging.
* </pre>
*
* <code>optional .google.cloud.compute.v1.FirewallLogConfig log_config = 351299741;</code>
*
* @return Whether the logConfig field is set.
*/
boolean hasLogConfig();
/**
*
*
* <pre>
* This field denotes the logging options for a particular firewall rule. If logging is enabled, logs will be exported to Cloud Logging.
* </pre>
*
* <code>optional .google.cloud.compute.v1.FirewallLogConfig log_config = 351299741;</code>
*
* @return The logConfig.
*/
com.google.cloud.compute.v1.FirewallLogConfig getLogConfig();
/**
*
*
* <pre>
* This field denotes the logging options for a particular firewall rule. If logging is enabled, logs will be exported to Cloud Logging.
* </pre>
*
* <code>optional .google.cloud.compute.v1.FirewallLogConfig log_config = 351299741;</code>
*/
com.google.cloud.compute.v1.FirewallLogConfigOrBuilder getLogConfigOrBuilder();
/**
*
*
* <pre>
* Name of the resource; provided by the client when the resource is created. The name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?`. The first character must be a lowercase letter, and all following characters (except for the last character) must be a dash, lowercase letter, or digit. The last character must be a lowercase letter or digit.
* </pre>
*
* <code>optional string name = 3373707;</code>
*
* @return Whether the name field is set.
*/
boolean hasName();
/**
*
*
* <pre>
* Name of the resource; provided by the client when the resource is created. The name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?`. The first character must be a lowercase letter, and all following characters (except for the last character) must be a dash, lowercase letter, or digit. The last character must be a lowercase letter or digit.
* </pre>
*
* <code>optional string name = 3373707;</code>
*
* @return The name.
*/
java.lang.String getName();
/**
*
*
* <pre>
* Name of the resource; provided by the client when the resource is created. The name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?`. The first character must be a lowercase letter, and all following characters (except for the last character) must be a dash, lowercase letter, or digit. The last character must be a lowercase letter or digit.
* </pre>
*
* <code>optional string name = 3373707;</code>
*
* @return The bytes for name.
*/
com.google.protobuf.ByteString getNameBytes();
/**
*
*
* <pre>
* URL of the network resource for this firewall rule. If not specified when creating a firewall rule, the default network is used: global/networks/default If you choose to specify this field, you can specify the network as a full or partial URL. For example, the following are all valid URLs: - https://www.googleapis.com/compute/v1/projects/myproject/global/networks/my-network - projects/myproject/global/networks/my-network - global/networks/default
* </pre>
*
* <code>optional string network = 232872494;</code>
*
* @return Whether the network field is set.
*/
boolean hasNetwork();
/**
*
*
* <pre>
* URL of the network resource for this firewall rule. If not specified when creating a firewall rule, the default network is used: global/networks/default If you choose to specify this field, you can specify the network as a full or partial URL. For example, the following are all valid URLs: - https://www.googleapis.com/compute/v1/projects/myproject/global/networks/my-network - projects/myproject/global/networks/my-network - global/networks/default
* </pre>
*
* <code>optional string network = 232872494;</code>
*
* @return The network.
*/
java.lang.String getNetwork();
/**
*
*
* <pre>
* URL of the network resource for this firewall rule. If not specified when creating a firewall rule, the default network is used: global/networks/default If you choose to specify this field, you can specify the network as a full or partial URL. For example, the following are all valid URLs: - https://www.googleapis.com/compute/v1/projects/myproject/global/networks/my-network - projects/myproject/global/networks/my-network - global/networks/default
* </pre>
*
* <code>optional string network = 232872494;</code>
*
* @return The bytes for network.
*/
com.google.protobuf.ByteString getNetworkBytes();
/**
*
*
* <pre>
* Input only. [Input Only] Additional params passed with the request, but not persisted as part of resource payload.
* </pre>
*
* <code>optional .google.cloud.compute.v1.FirewallParams params = 78313862;</code>
*
* @return Whether the params field is set.
*/
boolean hasParams();
/**
*
*
* <pre>
* Input only. [Input Only] Additional params passed with the request, but not persisted as part of resource payload.
* </pre>
*
* <code>optional .google.cloud.compute.v1.FirewallParams params = 78313862;</code>
*
* @return The params.
*/
com.google.cloud.compute.v1.FirewallParams getParams();
/**
*
*
* <pre>
* Input only. [Input Only] Additional params passed with the request, but not persisted as part of resource payload.
* </pre>
*
* <code>optional .google.cloud.compute.v1.FirewallParams params = 78313862;</code>
*/
com.google.cloud.compute.v1.FirewallParamsOrBuilder getParamsOrBuilder();
/**
*
*
* <pre>
* Priority for this rule. This is an integer between `0` and `65535`, both inclusive. The default value is `1000`. Relative priorities determine which rule takes effect if multiple rules apply. Lower values indicate higher priority. For example, a rule with priority `0` has higher precedence than a rule with priority `1`. DENY rules take precedence over ALLOW rules if they have equal priority. Note that VPC networks have implied rules with a priority of `65535`. To avoid conflicts with the implied rules, use a priority number less than `65535`.
* </pre>
*
* <code>optional int32 priority = 445151652;</code>
*
* @return Whether the priority field is set.
*/
boolean hasPriority();
/**
*
*
* <pre>
* Priority for this rule. This is an integer between `0` and `65535`, both inclusive. The default value is `1000`. Relative priorities determine which rule takes effect if multiple rules apply. Lower values indicate higher priority. For example, a rule with priority `0` has higher precedence than a rule with priority `1`. DENY rules take precedence over ALLOW rules if they have equal priority. Note that VPC networks have implied rules with a priority of `65535`. To avoid conflicts with the implied rules, use a priority number less than `65535`.
* </pre>
*
* <code>optional int32 priority = 445151652;</code>
*
* @return The priority.
*/
int getPriority();
/**
*
*
* <pre>
* [Output Only] Server-defined URL for the resource.
* </pre>
*
* <code>optional string self_link = 456214797;</code>
*
* @return Whether the selfLink field is set.
*/
boolean hasSelfLink();
/**
*
*
* <pre>
* [Output Only] Server-defined URL for the resource.
* </pre>
*
* <code>optional string self_link = 456214797;</code>
*
* @return The selfLink.
*/
java.lang.String getSelfLink();
/**
*
*
* <pre>
* [Output Only] Server-defined URL for the resource.
* </pre>
*
* <code>optional string self_link = 456214797;</code>
*
* @return The bytes for selfLink.
*/
com.google.protobuf.ByteString getSelfLinkBytes();
/**
*
*
* <pre>
* If source ranges are specified, the firewall rule applies only to traffic that has a source IP address in these ranges. These ranges must be expressed in CIDR format. One or both of sourceRanges and sourceTags may be set. If both fields are set, the rule applies to traffic that has a source IP address within sourceRanges OR a source IP from a resource with a matching tag listed in the sourceTags field. The connection does not need to match both fields for the rule to apply. Both IPv4 and IPv6 are supported.
* </pre>
*
* <code>repeated string source_ranges = 200097658;</code>
*
* @return A list containing the sourceRanges.
*/
java.util.List<java.lang.String> getSourceRangesList();
/**
*
*
* <pre>
* If source ranges are specified, the firewall rule applies only to traffic that has a source IP address in these ranges. These ranges must be expressed in CIDR format. One or both of sourceRanges and sourceTags may be set. If both fields are set, the rule applies to traffic that has a source IP address within sourceRanges OR a source IP from a resource with a matching tag listed in the sourceTags field. The connection does not need to match both fields for the rule to apply. Both IPv4 and IPv6 are supported.
* </pre>
*
* <code>repeated string source_ranges = 200097658;</code>
*
* @return The count of sourceRanges.
*/
int getSourceRangesCount();
/**
*
*
* <pre>
* If source ranges are specified, the firewall rule applies only to traffic that has a source IP address in these ranges. These ranges must be expressed in CIDR format. One or both of sourceRanges and sourceTags may be set. If both fields are set, the rule applies to traffic that has a source IP address within sourceRanges OR a source IP from a resource with a matching tag listed in the sourceTags field. The connection does not need to match both fields for the rule to apply. Both IPv4 and IPv6 are supported.
* </pre>
*
* <code>repeated string source_ranges = 200097658;</code>
*
* @param index The index of the element to return.
* @return The sourceRanges at the given index.
*/
java.lang.String getSourceRanges(int index);
/**
*
*
* <pre>
* If source ranges are specified, the firewall rule applies only to traffic that has a source IP address in these ranges. These ranges must be expressed in CIDR format. One or both of sourceRanges and sourceTags may be set. If both fields are set, the rule applies to traffic that has a source IP address within sourceRanges OR a source IP from a resource with a matching tag listed in the sourceTags field. The connection does not need to match both fields for the rule to apply. Both IPv4 and IPv6 are supported.
* </pre>
*
* <code>repeated string source_ranges = 200097658;</code>
*
* @param index The index of the value to return.
* @return The bytes of the sourceRanges at the given index.
*/
com.google.protobuf.ByteString getSourceRangesBytes(int index);
/**
*
*
* <pre>
* If source service accounts are specified, the firewall rules apply only to traffic originating from an instance with a service account in this list. Source service accounts cannot be used to control traffic to an instance's external IP address because service accounts are associated with an instance, not an IP address. sourceRanges can be set at the same time as sourceServiceAccounts. If both are set, the firewall applies to traffic that has a source IP address within the sourceRanges OR a source IP that belongs to an instance with service account listed in sourceServiceAccount. The connection does not need to match both fields for the firewall to apply. sourceServiceAccounts cannot be used at the same time as sourceTags or targetTags.
* </pre>
*
* <code>repeated string source_service_accounts = 105100756;</code>
*
* @return A list containing the sourceServiceAccounts.
*/
java.util.List<java.lang.String> getSourceServiceAccountsList();
/**
*
*
* <pre>
* If source service accounts are specified, the firewall rules apply only to traffic originating from an instance with a service account in this list. Source service accounts cannot be used to control traffic to an instance's external IP address because service accounts are associated with an instance, not an IP address. sourceRanges can be set at the same time as sourceServiceAccounts. If both are set, the firewall applies to traffic that has a source IP address within the sourceRanges OR a source IP that belongs to an instance with service account listed in sourceServiceAccount. The connection does not need to match both fields for the firewall to apply. sourceServiceAccounts cannot be used at the same time as sourceTags or targetTags.
* </pre>
*
* <code>repeated string source_service_accounts = 105100756;</code>
*
* @return The count of sourceServiceAccounts.
*/
int getSourceServiceAccountsCount();
/**
*
*
* <pre>
* If source service accounts are specified, the firewall rules apply only to traffic originating from an instance with a service account in this list. Source service accounts cannot be used to control traffic to an instance's external IP address because service accounts are associated with an instance, not an IP address. sourceRanges can be set at the same time as sourceServiceAccounts. If both are set, the firewall applies to traffic that has a source IP address within the sourceRanges OR a source IP that belongs to an instance with service account listed in sourceServiceAccount. The connection does not need to match both fields for the firewall to apply. sourceServiceAccounts cannot be used at the same time as sourceTags or targetTags.
* </pre>
*
* <code>repeated string source_service_accounts = 105100756;</code>
*
* @param index The index of the element to return.
* @return The sourceServiceAccounts at the given index.
*/
java.lang.String getSourceServiceAccounts(int index);
/**
*
*
* <pre>
* If source service accounts are specified, the firewall rules apply only to traffic originating from an instance with a service account in this list. Source service accounts cannot be used to control traffic to an instance's external IP address because service accounts are associated with an instance, not an IP address. sourceRanges can be set at the same time as sourceServiceAccounts. If both are set, the firewall applies to traffic that has a source IP address within the sourceRanges OR a source IP that belongs to an instance with service account listed in sourceServiceAccount. The connection does not need to match both fields for the firewall to apply. sourceServiceAccounts cannot be used at the same time as sourceTags or targetTags.
* </pre>
*
* <code>repeated string source_service_accounts = 105100756;</code>
*
* @param index The index of the value to return.
* @return The bytes of the sourceServiceAccounts at the given index.
*/
com.google.protobuf.ByteString getSourceServiceAccountsBytes(int index);
/**
*
*
* <pre>
* If source tags are specified, the firewall rule applies only to traffic with source IPs that match the primary network interfaces of VM instances that have the tag and are in the same VPC network. Source tags cannot be used to control traffic to an instance's external IP address, it only applies to traffic between instances in the same virtual network. Because tags are associated with instances, not IP addresses. One or both of sourceRanges and sourceTags may be set. If both fields are set, the firewall applies to traffic that has a source IP address within sourceRanges OR a source IP from a resource with a matching tag listed in the sourceTags field. The connection does not need to match both fields for the firewall to apply.
* </pre>
*
* <code>repeated string source_tags = 452222397;</code>
*
* @return A list containing the sourceTags.
*/
java.util.List<java.lang.String> getSourceTagsList();
/**
*
*
* <pre>
* If source tags are specified, the firewall rule applies only to traffic with source IPs that match the primary network interfaces of VM instances that have the tag and are in the same VPC network. Source tags cannot be used to control traffic to an instance's external IP address, it only applies to traffic between instances in the same virtual network. Because tags are associated with instances, not IP addresses. One or both of sourceRanges and sourceTags may be set. If both fields are set, the firewall applies to traffic that has a source IP address within sourceRanges OR a source IP from a resource with a matching tag listed in the sourceTags field. The connection does not need to match both fields for the firewall to apply.
* </pre>
*
* <code>repeated string source_tags = 452222397;</code>
*
* @return The count of sourceTags.
*/
int getSourceTagsCount();
/**
*
*
* <pre>
* If source tags are specified, the firewall rule applies only to traffic with source IPs that match the primary network interfaces of VM instances that have the tag and are in the same VPC network. Source tags cannot be used to control traffic to an instance's external IP address, it only applies to traffic between instances in the same virtual network. Because tags are associated with instances, not IP addresses. One or both of sourceRanges and sourceTags may be set. If both fields are set, the firewall applies to traffic that has a source IP address within sourceRanges OR a source IP from a resource with a matching tag listed in the sourceTags field. The connection does not need to match both fields for the firewall to apply.
* </pre>
*
* <code>repeated string source_tags = 452222397;</code>
*
* @param index The index of the element to return.
* @return The sourceTags at the given index.
*/
java.lang.String getSourceTags(int index);
/**
*
*
* <pre>
* If source tags are specified, the firewall rule applies only to traffic with source IPs that match the primary network interfaces of VM instances that have the tag and are in the same VPC network. Source tags cannot be used to control traffic to an instance's external IP address, it only applies to traffic between instances in the same virtual network. Because tags are associated with instances, not IP addresses. One or both of sourceRanges and sourceTags may be set. If both fields are set, the firewall applies to traffic that has a source IP address within sourceRanges OR a source IP from a resource with a matching tag listed in the sourceTags field. The connection does not need to match both fields for the firewall to apply.
* </pre>
*
* <code>repeated string source_tags = 452222397;</code>
*
* @param index The index of the value to return.
* @return The bytes of the sourceTags at the given index.
*/
com.google.protobuf.ByteString getSourceTagsBytes(int index);
/**
*
*
* <pre>
* A list of service accounts indicating sets of instances located in the network that may make network connections as specified in allowed[]. targetServiceAccounts cannot be used at the same time as targetTags or sourceTags. If neither targetServiceAccounts nor targetTags are specified, the firewall rule applies to all instances on the specified network.
* </pre>
*
* <code>repeated string target_service_accounts = 457639710;</code>
*
* @return A list containing the targetServiceAccounts.
*/
java.util.List<java.lang.String> getTargetServiceAccountsList();
/**
*
*
* <pre>
* A list of service accounts indicating sets of instances located in the network that may make network connections as specified in allowed[]. targetServiceAccounts cannot be used at the same time as targetTags or sourceTags. If neither targetServiceAccounts nor targetTags are specified, the firewall rule applies to all instances on the specified network.
* </pre>
*
* <code>repeated string target_service_accounts = 457639710;</code>
*
* @return The count of targetServiceAccounts.
*/
int getTargetServiceAccountsCount();
/**
*
*
* <pre>
* A list of service accounts indicating sets of instances located in the network that may make network connections as specified in allowed[]. targetServiceAccounts cannot be used at the same time as targetTags or sourceTags. If neither targetServiceAccounts nor targetTags are specified, the firewall rule applies to all instances on the specified network.
* </pre>
*
* <code>repeated string target_service_accounts = 457639710;</code>
*
* @param index The index of the element to return.
* @return The targetServiceAccounts at the given index.
*/
java.lang.String getTargetServiceAccounts(int index);
/**
*
*
* <pre>
* A list of service accounts indicating sets of instances located in the network that may make network connections as specified in allowed[]. targetServiceAccounts cannot be used at the same time as targetTags or sourceTags. If neither targetServiceAccounts nor targetTags are specified, the firewall rule applies to all instances on the specified network.
* </pre>
*
* <code>repeated string target_service_accounts = 457639710;</code>
*
* @param index The index of the value to return.
* @return The bytes of the targetServiceAccounts at the given index.
*/
com.google.protobuf.ByteString getTargetServiceAccountsBytes(int index);
/**
*
*
* <pre>
* A list of tags that controls which instances the firewall rule applies to. If targetTags are specified, then the firewall rule applies only to instances in the VPC network that have one of those tags. If no targetTags are specified, the firewall rule applies to all instances on the specified network.
* </pre>
*
* <code>repeated string target_tags = 62901767;</code>
*
* @return A list containing the targetTags.
*/
java.util.List<java.lang.String> getTargetTagsList();
/**
*
*
* <pre>
* A list of tags that controls which instances the firewall rule applies to. If targetTags are specified, then the firewall rule applies only to instances in the VPC network that have one of those tags. If no targetTags are specified, the firewall rule applies to all instances on the specified network.
* </pre>
*
* <code>repeated string target_tags = 62901767;</code>
*
* @return The count of targetTags.
*/
int getTargetTagsCount();
/**
*
*
* <pre>
* A list of tags that controls which instances the firewall rule applies to. If targetTags are specified, then the firewall rule applies only to instances in the VPC network that have one of those tags. If no targetTags are specified, the firewall rule applies to all instances on the specified network.
* </pre>
*
* <code>repeated string target_tags = 62901767;</code>
*
* @param index The index of the element to return.
* @return The targetTags at the given index.
*/
java.lang.String getTargetTags(int index);
/**
*
*
* <pre>
* A list of tags that controls which instances the firewall rule applies to. If targetTags are specified, then the firewall rule applies only to instances in the VPC network that have one of those tags. If no targetTags are specified, the firewall rule applies to all instances on the specified network.
* </pre>
*
* <code>repeated string target_tags = 62901767;</code>
*
* @param index The index of the value to return.
* @return The bytes of the targetTags at the given index.
*/
com.google.protobuf.ByteString getTargetTagsBytes(int index);
}
|
openjdk/jdk8 | 35,040 | langtools/src/share/classes/com/sun/tools/jdeps/JdepsTask.java | /*
* Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.sun.tools.jdeps;
import com.sun.tools.classfile.AccessFlags;
import com.sun.tools.classfile.ClassFile;
import com.sun.tools.classfile.ConstantPoolException;
import com.sun.tools.classfile.Dependencies;
import com.sun.tools.classfile.Dependencies.ClassFileError;
import com.sun.tools.classfile.Dependency;
import com.sun.tools.jdeps.PlatformClassPath.JDKArchive;
import java.io.*;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.text.MessageFormat;
import java.util.*;
import java.util.regex.Pattern;
/**
* Implementation for the jdeps tool for static class dependency analysis.
*/
class JdepsTask {
static class BadArgs extends Exception {
static final long serialVersionUID = 8765093759964640721L;
BadArgs(String key, Object... args) {
super(JdepsTask.getMessage(key, args));
this.key = key;
this.args = args;
}
BadArgs showUsage(boolean b) {
showUsage = b;
return this;
}
final String key;
final Object[] args;
boolean showUsage;
}
static abstract class Option {
Option(boolean hasArg, String... aliases) {
this.hasArg = hasArg;
this.aliases = aliases;
}
boolean isHidden() {
return false;
}
boolean matches(String opt) {
for (String a : aliases) {
if (a.equals(opt))
return true;
if (hasArg && opt.startsWith(a + "="))
return true;
}
return false;
}
boolean ignoreRest() {
return false;
}
abstract void process(JdepsTask task, String opt, String arg) throws BadArgs;
final boolean hasArg;
final String[] aliases;
}
static abstract class HiddenOption extends Option {
HiddenOption(boolean hasArg, String... aliases) {
super(hasArg, aliases);
}
boolean isHidden() {
return true;
}
}
static Option[] recognizedOptions = {
new Option(false, "-h", "-?", "-help") {
void process(JdepsTask task, String opt, String arg) {
task.options.help = true;
}
},
new Option(true, "-dotoutput") {
void process(JdepsTask task, String opt, String arg) throws BadArgs {
Path p = Paths.get(arg);
if (Files.exists(p) && (!Files.isDirectory(p) || !Files.isWritable(p))) {
throw new BadArgs("err.dot.output.path", arg);
}
task.options.dotOutputDir = arg;
}
},
new Option(false, "-s", "-summary") {
void process(JdepsTask task, String opt, String arg) {
task.options.showSummary = true;
task.options.verbose = Analyzer.Type.SUMMARY;
}
},
new Option(false, "-v", "-verbose",
"-verbose:package",
"-verbose:class")
{
void process(JdepsTask task, String opt, String arg) throws BadArgs {
switch (opt) {
case "-v":
case "-verbose":
task.options.verbose = Analyzer.Type.VERBOSE;
break;
case "-verbose:package":
task.options.verbose = Analyzer.Type.PACKAGE;
break;
case "-verbose:class":
task.options.verbose = Analyzer.Type.CLASS;
break;
default:
throw new BadArgs("err.invalid.arg.for.option", opt);
}
}
},
new Option(true, "-cp", "-classpath") {
void process(JdepsTask task, String opt, String arg) {
task.options.classpath = arg;
}
},
new Option(true, "-p", "-package") {
void process(JdepsTask task, String opt, String arg) {
task.options.packageNames.add(arg);
}
},
new Option(true, "-e", "-regex") {
void process(JdepsTask task, String opt, String arg) {
task.options.regex = arg;
}
},
new Option(true, "-include") {
void process(JdepsTask task, String opt, String arg) throws BadArgs {
task.options.includePattern = Pattern.compile(arg);
}
},
new Option(false, "-P", "-profile") {
void process(JdepsTask task, String opt, String arg) throws BadArgs {
task.options.showProfile = true;
if (Profile.getProfileCount() == 0) {
throw new BadArgs("err.option.unsupported", opt, getMessage("err.profiles.msg"));
}
}
},
new Option(false, "-apionly") {
void process(JdepsTask task, String opt, String arg) {
task.options.apiOnly = true;
}
},
new Option(false, "-R", "-recursive") {
void process(JdepsTask task, String opt, String arg) {
task.options.depth = 0;
}
},
new Option(false, "-jdkinternals") {
void process(JdepsTask task, String opt, String arg) {
task.options.findJDKInternals = true;
task.options.verbose = Analyzer.Type.CLASS;
if (task.options.includePattern == null) {
task.options.includePattern = Pattern.compile(".*");
}
}
},
new Option(false, "-version") {
void process(JdepsTask task, String opt, String arg) {
task.options.version = true;
}
},
new HiddenOption(false, "-fullversion") {
void process(JdepsTask task, String opt, String arg) {
task.options.fullVersion = true;
}
},
new HiddenOption(false, "-showlabel") {
void process(JdepsTask task, String opt, String arg) {
task.options.showLabel = true;
}
},
new HiddenOption(true, "-depth") {
void process(JdepsTask task, String opt, String arg) throws BadArgs {
try {
task.options.depth = Integer.parseInt(arg);
} catch (NumberFormatException e) {
throw new BadArgs("err.invalid.arg.for.option", opt);
}
}
},
};
private static final String PROGNAME = "jdeps";
private final Options options = new Options();
private final List<String> classes = new ArrayList<String>();
private PrintWriter log;
void setLog(PrintWriter out) {
log = out;
}
/**
* Result codes.
*/
static final int EXIT_OK = 0, // Completed with no errors.
EXIT_ERROR = 1, // Completed but reported errors.
EXIT_CMDERR = 2, // Bad command-line arguments
EXIT_SYSERR = 3, // System error or resource exhaustion.
EXIT_ABNORMAL = 4;// terminated abnormally
int run(String[] args) {
if (log == null) {
log = new PrintWriter(System.out);
}
try {
handleOptions(args);
if (options.help) {
showHelp();
}
if (options.version || options.fullVersion) {
showVersion(options.fullVersion);
}
if (classes.isEmpty() && options.includePattern == null) {
if (options.help || options.version || options.fullVersion) {
return EXIT_OK;
} else {
showHelp();
return EXIT_CMDERR;
}
}
if (options.regex != null && options.packageNames.size() > 0) {
showHelp();
return EXIT_CMDERR;
}
if (options.findJDKInternals &&
(options.regex != null || options.packageNames.size() > 0 || options.showSummary)) {
showHelp();
return EXIT_CMDERR;
}
if (options.showSummary && options.verbose != Analyzer.Type.SUMMARY) {
showHelp();
return EXIT_CMDERR;
}
boolean ok = run();
return ok ? EXIT_OK : EXIT_ERROR;
} catch (BadArgs e) {
reportError(e.key, e.args);
if (e.showUsage) {
log.println(getMessage("main.usage.summary", PROGNAME));
}
return EXIT_CMDERR;
} catch (IOException e) {
return EXIT_ABNORMAL;
} finally {
log.flush();
}
}
    // All archives analyzed, in reporting order: input archives first, then classpath entries.
    private final List<Archive> sourceLocations = new ArrayList<>();
private boolean run() throws IOException {
findDependencies();
Analyzer analyzer = new Analyzer(options.verbose);
analyzer.run(sourceLocations);
if (options.dotOutputDir != null) {
Path dir = Paths.get(options.dotOutputDir);
Files.createDirectories(dir);
generateDotFiles(dir, analyzer);
} else {
printRawOutput(log, analyzer);
}
return true;
}
    /**
     * Writes DOT output into {@code dir}: a summary.dot of archive-level (or,
     * with -verbose, package-level) dependences, plus one .dot file per
     * archive unless running in summary mode.
     */
    private void generateDotFiles(Path dir, Analyzer analyzer) throws IOException {
        Path summary = dir.resolve("summary.dot");
        boolean verbose = options.verbose == Analyzer.Type.VERBOSE;
        // Verbose mode groups the summary per package; otherwise per archive.
        DotGraph<?> graph = verbose ? new DotSummaryForPackage()
                                    : new DotSummaryForArchive();
        for (Archive archive : sourceLocations) {
            analyzer.visitArchiveDependences(archive, graph);
            if (verbose || options.showLabel) {
                // traverse detailed dependences to generate package-level
                // summary or build labels for edges
                analyzer.visitDependences(archive, graph);
            }
        }
        try (PrintWriter sw = new PrintWriter(Files.newOutputStream(summary))) {
            graph.writeTo(sw);
        }
        // output individual .dot file for each archive
        if (options.verbose != Analyzer.Type.SUMMARY) {
            for (Archive archive : sourceLocations) {
                if (analyzer.hasDependences(archive)) {
                    Path dotfile = dir.resolve(archive.getFileName() + ".dot");
                    try (PrintWriter pw = new PrintWriter(Files.newOutputStream(dotfile));
                         DotFileFormatter formatter = new DotFileFormatter(pw, archive)) {
                        analyzer.visitDependences(archive, formatter);
                    }
                }
            }
        }
    }
private void printRawOutput(PrintWriter writer, Analyzer analyzer) {
for (Archive archive : sourceLocations) {
RawOutputFormatter formatter = new RawOutputFormatter(writer);
analyzer.visitArchiveDependences(archive, formatter);
if (options.verbose != Analyzer.Type.SUMMARY) {
analyzer.visitDependences(archive, formatter);
}
}
}
private boolean isValidClassName(String name) {
if (!Character.isJavaIdentifierStart(name.charAt(0))) {
return false;
}
for (int i=1; i < name.length(); i++) {
char c = name.charAt(i);
if (c != '.' && !Character.isJavaIdentifierPart(c)) {
return false;
}
}
return true;
}
private Dependency.Filter getDependencyFilter() {
if (options.regex != null) {
return Dependencies.getRegexFilter(Pattern.compile(options.regex));
} else if (options.packageNames.size() > 0) {
return Dependencies.getPackageFilter(options.packageNames, false);
} else {
return new Dependency.Filter() {
@Override
public boolean accepts(Dependency dependency) {
return !dependency.getOrigin().equals(dependency.getTarget());
}
};
}
}
private boolean matches(String classname, AccessFlags flags) {
if (options.apiOnly && !flags.is(AccessFlags.ACC_PUBLIC)) {
return false;
} else if (options.includePattern != null) {
return options.includePattern.matcher(classname.replace('/', '.')).matches();
} else {
return true;
}
}
    /**
     * Builds the dependency data: parses the archives/class names given on
     * the command line, records their immediate dependences, then resolves
     * transitive dependences breadth-first against the classpath up to
     * {@code options.depth} levels (unbounded when depth is 0, i.e. -R).
     * Results accumulate in the Archive objects held by sourceLocations.
     */
    private void findDependencies() throws IOException {
        Dependency.Finder finder =
            options.apiOnly ? Dependencies.getAPIFinder(AccessFlags.ACC_PROTECTED)
                            : Dependencies.getClassDependencyFinder();
        Dependency.Filter filter = getDependencyFilter();
        // Arguments naming existing files become input archives; other
        // syntactically-valid class names become roots for transitive analysis.
        List<Archive> archives = new ArrayList<>();
        Deque<String> roots = new LinkedList<>();
        for (String s : classes) {
            Path p = Paths.get(s);
            if (Files.exists(p)) {
                archives.add(new Archive(p, ClassFileReader.newInstance(p)));
            } else {
                if (isValidClassName(s)) {
                    roots.add(s);
                } else {
                    warning("warn.invalid.arg", s);
                }
            }
        }
        sourceLocations.addAll(archives);
        List<Archive> classpaths = new ArrayList<>(); // for class file lookup
        classpaths.addAll(getClassPathArchives(options.classpath));
        if (options.includePattern != null) {
            // With -include, classpath classes are analyzed as inputs too.
            archives.addAll(classpaths);
        }
        classpaths.addAll(PlatformClassPath.getArchives());
        // add all classpath archives to the source locations for reporting
        sourceLocations.addAll(classpaths);
        // Work queue of names of classfiles to be searched.
        // Entries will be unique, and for classes that do not yet have
        // dependencies in the results map.
        Deque<String> deque = new LinkedList<>();
        Set<String> doneClasses = new HashSet<>();
        // get the immediate dependencies of the input files
        for (Archive a : archives) {
            for (ClassFile cf : a.reader().getClassFiles()) {
                String classFileName;
                try {
                    classFileName = cf.getName();
                } catch (ConstantPoolException e) {
                    throw new ClassFileError(e);
                }
                if (matches(classFileName, cf.access_flags)) {
                    if (!doneClasses.contains(classFileName)) {
                        doneClasses.add(classFileName);
                    }
                    for (Dependency d : finder.findDependencies(cf)) {
                        if (filter.accepts(d)) {
                            String cn = d.getTarget().getName();
                            // Queue unseen targets for transitive resolution.
                            // NOTE(review): deque.contains() is O(n); fine for
                            // typical inputs, worth confirming for huge graphs.
                            if (!doneClasses.contains(cn) && !deque.contains(cn)) {
                                deque.add(cn);
                            }
                            a.addClass(d.getOrigin(), d.getTarget());
                        }
                    }
                }
            }
        }
        // add Archive for looking up classes from the classpath
        // for transitive dependency analysis
        Deque<String> unresolved = roots;
        int depth = options.depth > 0 ? options.depth : Integer.MAX_VALUE;
        // Each do-while iteration resolves one BFS level of class names.
        do {
            String name;
            while ((name = unresolved.poll()) != null) {
                if (doneClasses.contains(name)) {
                    continue;
                }
                ClassFile cf = null;
                // First classpath entry containing the class wins.
                for (Archive a : classpaths) {
                    cf = a.reader().getClassFile(name);
                    if (cf != null) {
                        String classFileName;
                        try {
                            classFileName = cf.getName();
                        } catch (ConstantPoolException e) {
                            throw new ClassFileError(e);
                        }
                        if (!doneClasses.contains(classFileName)) {
                            // if name is a fully-qualified class name specified
                            // from command-line, this class might already be parsed
                            doneClasses.add(classFileName);
                            for (Dependency d : finder.findDependencies(cf)) {
                                if (depth == 0) {
                                    // ignore the dependency
                                    a.addClass(d.getOrigin());
                                    break;
                                } else if (filter.accepts(d)) {
                                    a.addClass(d.getOrigin(), d.getTarget());
                                    String cn = d.getTarget().getName();
                                    if (!doneClasses.contains(cn) && !deque.contains(cn)) {
                                        deque.add(cn);
                                    }
                                }
                            }
                        }
                        break;
                    }
                }
                if (cf == null) {
                    // Not found anywhere; mark done so it is not retried.
                    doneClasses.add(name);
                }
            }
            unresolved = deque;
            deque = new LinkedList<>();
        } while (!unresolved.isEmpty() && depth-- > 0);
    }
public void handleOptions(String[] args) throws BadArgs {
// process options
for (int i=0; i < args.length; i++) {
if (args[i].charAt(0) == '-') {
String name = args[i];
Option option = getOption(name);
String param = null;
if (option.hasArg) {
if (name.startsWith("-") && name.indexOf('=') > 0) {
param = name.substring(name.indexOf('=') + 1, name.length());
} else if (i + 1 < args.length) {
param = args[++i];
}
if (param == null || param.isEmpty() || param.charAt(0) == '-') {
throw new BadArgs("err.missing.arg", name).showUsage(true);
}
}
option.process(this, name, param);
if (option.ignoreRest()) {
i = args.length;
}
} else {
// process rest of the input arguments
for (; i < args.length; i++) {
String name = args[i];
if (name.charAt(0) == '-') {
throw new BadArgs("err.option.after.class", name).showUsage(true);
}
classes.add(name);
}
}
}
}
private Option getOption(String name) throws BadArgs {
for (Option o : recognizedOptions) {
if (o.matches(name)) {
return o;
}
}
throw new BadArgs("err.unknown.option", name).showUsage(true);
}
    /** Prints the localized error message for {@code key}, prefixed with error.prefix. */
    private void reportError(String key, Object... args) {
        log.println(getMessage("error.prefix") + " " + getMessage(key, args));
    }
    /** Prints the localized warning message for {@code key}, prefixed with warn.prefix. */
    private void warning(String key, Object... args) {
        log.println(getMessage("warn.prefix") + " " + getMessage(key, args));
    }
private void showHelp() {
log.println(getMessage("main.usage", PROGNAME));
for (Option o : recognizedOptions) {
String name = o.aliases[0].substring(1); // there must always be at least one name
name = name.charAt(0) == '-' ? name.substring(1) : name;
if (o.isHidden() || name.equals("h")) {
continue;
}
log.println(getMessage("main.opt." + name));
}
}
private void showVersion(boolean full) {
log.println(version(full ? "full" : "release"));
}
private String version(String key) {
// key=version: mm.nn.oo[-milestone]
// key=full: mm.mm.oo[-milestone]-build
if (ResourceBundleHelper.versionRB == null) {
return System.getProperty("java.version");
}
try {
return ResourceBundleHelper.versionRB.getString(key);
} catch (MissingResourceException e) {
return getMessage("version.unknown", System.getProperty("java.version"));
}
}
static String getMessage(String key, Object... args) {
try {
return MessageFormat.format(ResourceBundleHelper.bundle.getString(key), args);
} catch (MissingResourceException e) {
throw new InternalError("Missing message: " + key);
}
}
    /** Mutable aggregate of all parsed command-line settings. */
    private static class Options {
        boolean help;              // -h/-?/-help
        boolean version;           // -version
        boolean fullVersion;       // -fullversion (hidden)
        boolean showProfile;       // -P/-profile
        boolean showSummary;       // -s/-summary
        // NOTE(review): wildcard is never set anywhere in this file — confirm
        // whether it is still used before relying on it.
        boolean wildcard;
        boolean apiOnly;           // -apionly
        boolean showLabel;         // -showlabel (hidden)
        boolean findJDKInternals;  // -jdkinternals
        String dotOutputDir;       // -dotoutput directory; null means raw text output
        String classpath = "";     // -cp/-classpath value
        int depth = 1;             // transitive depth; 0 selects unbounded (-R)
        Analyzer.Type verbose = Analyzer.Type.PACKAGE;  // reporting granularity
        Set<String> packageNames = new HashSet<>();     // -p/-package filters
        String regex; // apply to the dependences
        Pattern includePattern; // apply to classes
    }
    /**
     * Initialization-on-demand holder for the jdeps message bundle and the
     * version bundle; bundles are loaded once, on first use of this class.
     */
    private static class ResourceBundleHelper {
        static final ResourceBundle versionRB;
        static final ResourceBundle bundle;
        static {
            Locale locale = Locale.getDefault();
            try {
                bundle = ResourceBundle.getBundle("com.sun.tools.jdeps.resources.jdeps", locale);
            } catch (MissingResourceException e) {
                throw new InternalError("Cannot find jdeps resource bundle for locale " + locale);
            }
            try {
                versionRB = ResourceBundle.getBundle("com.sun.tools.jdeps.resources.version");
            } catch (MissingResourceException e) {
                throw new InternalError("version.resource.missing");
            }
        }
    }
private List<Archive> getArchives(List<String> filenames) throws IOException {
List<Archive> result = new ArrayList<Archive>();
for (String s : filenames) {
Path p = Paths.get(s);
if (Files.exists(p)) {
result.add(new Archive(p, ClassFileReader.newInstance(p)));
} else {
warning("warn.file.not.exist", s);
}
}
return result;
}
private List<Archive> getClassPathArchives(String paths) throws IOException {
List<Archive> result = new ArrayList<>();
if (paths.isEmpty()) {
return result;
}
for (String p : paths.split(File.pathSeparator)) {
if (p.length() > 0) {
List<Path> files = new ArrayList<>();
// wildcard to parse all JAR files e.g. -classpath dir/*
int i = p.lastIndexOf(".*");
if (i > 0) {
Path dir = Paths.get(p.substring(0, i));
try (DirectoryStream<Path> stream = Files.newDirectoryStream(dir, "*.jar")) {
for (Path entry : stream) {
files.add(entry);
}
}
} else {
files.add(Paths.get(p));
}
for (Path f : files) {
if (Files.exists(f)) {
result.add(new Archive(f, ClassFileReader.newInstance(f)));
}
}
}
}
return result;
}
/**
* If the given archive is JDK archive and non-null Profile,
* this method returns the profile name only if -profile option is specified;
* a null profile indicates it accesses a private JDK API and this method
* will return "JDK internal API".
*
* For non-JDK archives, this method returns the file name of the archive.
*/
private String getProfileArchiveInfo(Archive source, Profile profile) {
if (options.showProfile && profile != null)
return profile.toString();
if (source instanceof JDKArchive) {
return profile == null ? "JDK internal API (" + source.getFileName() + ")" : "";
}
return source.getFileName();
}
/**
* Returns the profile name or "JDK internal API" for JDK archive;
* otherwise empty string.
*/
private String profileName(Archive archive, Profile profile) {
if (archive instanceof JDKArchive) {
return Objects.toString(profile, "JDK internal API");
} else {
return "";
}
}
    /**
     * Plain-text output formatter: prints one line per dependence, at class or
     * package granularity depending on the configured verbose level.
     */
    class RawOutputFormatter implements Analyzer.Visitor {
        private final PrintWriter writer;
        RawOutputFormatter(PrintWriter writer) {
            this.writer = writer;
        }
        // Last origin printed; lets package-level output group targets under one heading.
        private String pkg = "";
        @Override
        public void visitDependence(String origin, Archive source,
                String target, Archive archive, Profile profile) {
            if (options.findJDKInternals &&
                !(archive instanceof JDKArchive && profile == null)) {
                // filter dependences other than JDK internal APIs
                return;
            }
            if (options.verbose == Analyzer.Type.VERBOSE) {
                writer.format(" %-50s -> %-50s %s%n",
                    origin, target, getProfileArchiveInfo(archive, profile));
            } else {
                // Print the origin heading only when it changes.
                if (!origin.equals(pkg)) {
                    pkg = origin;
                    writer.format(" %s (%s)%n", origin, source.getFileName());
                }
                writer.format(" -> %-50s %s%n",
                    target, getProfileArchiveInfo(archive, profile));
            }
        }
        @Override
        public void visitArchiveDependence(Archive origin, Archive target, Profile profile) {
            writer.format("%s -> %s", origin.getPathName(), target.getPathName());
            // Profile is appended only when -profile output is requested and known.
            if (options.showProfile && profile != null) {
                writer.format(" (%s)%n", profile);
            } else {
                writer.format("%n");
            }
        }
    }
class DotFileFormatter extends DotGraph<String> implements AutoCloseable {
private final PrintWriter writer;
private final String name;
DotFileFormatter(PrintWriter writer, Archive archive) {
this.writer = writer;
this.name = archive.getFileName();
writer.format("digraph \"%s\" {%n", name);
writer.format(" // Path: %s%n", archive.getPathName());
}
@Override
public void close() {
writer.println("}");
}
@Override
public void visitDependence(String origin, Archive source,
String target, Archive archive, Profile profile) {
if (options.findJDKInternals &&
!(archive instanceof JDKArchive && profile == null)) {
// filter dependences other than JDK internal APIs
return;
}
// if -P option is specified, package name -> profile will
// be shown and filter out multiple same edges.
String name = getProfileArchiveInfo(archive, profile);
writeEdge(writer, new Edge(origin, target, getProfileArchiveInfo(archive, profile)));
}
@Override
public void visitArchiveDependence(Archive origin, Archive target, Profile profile) {
throw new UnsupportedOperationException();
}
}
    /**
     * Summary graph whose nodes are archives; class-level dependences are folded
     * into the label of the existing archive-to-archive edge.
     */
    class DotSummaryForArchive extends DotGraph<Archive> {
        @Override
        public void visitDependence(String origin, Archive source,
                String target, Archive archive, Profile profile) {
            // The archive edge must already exist (created by visitArchiveDependence).
            Edge e = findEdge(source, archive);
            assert e != null;
            // add the dependency to the label if enabled and not compact1
            // NOTE(review): unlike DotSummaryForPackage, options.showLabel is not
            // checked here -- presumably the caller only installs this visitor
            // when labels are enabled; confirm against the call site.
            if (profile == Profile.COMPACT1) {
                return;
            }
            e.addLabel(origin, target, profileName(archive, profile));
        }
        @Override
        public void visitArchiveDependence(Archive origin, Archive target, Profile profile) {
            // add an edge with the archive's name with no tag
            // so that there is only one node for each JDK archive
            // while there may be edges to different profiles
            Edge e = addEdge(origin, target, "");
            if (target instanceof JDKArchive) {
                // add a label to print the profile
                if (profile == null) {
                    e.addLabel("JDK internal API");
                } else if (options.showProfile && !options.showLabel) {
                    e.addLabel(profile.toString());
                }
            }
        }
    }
    // DotSummaryForPackage generates the summary.dot file for verbose mode
    // (-v or -verbose option) that includes all class dependencies.
    // The summary.dot file shows package-level dependencies.
    class DotSummaryForPackage extends DotGraph<String> {
        // Maps a class name to its package ("<unnamed>" for the default package).
        private String packageOf(String cn) {
            int i = cn.lastIndexOf('.');
            return i > 0 ? cn.substring(0, i) : "<unnamed>";
        }
        @Override
        public void visitDependence(String origin, Archive source,
                String target, Archive archive, Profile profile) {
            // add a package dependency edge
            String from = packageOf(origin);
            String to = packageOf(target);
            Edge e = addEdge(from, to, getProfileArchiveInfo(archive, profile));
            // add the dependency to the label if enabled and not compact1
            if (!options.showLabel || profile == Profile.COMPACT1) {
                return;
            }
            // trim the package name of origin to shorten the label
            int i = origin.lastIndexOf('.');
            String n1 = i < 0 ? origin : origin.substring(i+1);
            e.addLabel(n1, target, profileName(archive, profile));
        }
        @Override
        public void visitArchiveDependence(Archive origin, Archive target, Profile profile) {
            // nop
        }
    }
    /**
     * Base class for the dot-file visitors: accumulates a set of unique edges
     * (insertion-ordered) and knows how to render them in graphviz syntax.
     */
    abstract class DotGraph<T> implements Analyzer.Visitor {
        private final Set<Edge> edges = new LinkedHashSet<>();
        // Most recently added/looked-up edge; acts as a one-entry cache in addEdge.
        private Edge curEdge;
        public void writeTo(PrintWriter writer) {
            writer.format("digraph \"summary\" {%n");
            for (Edge e: edges) {
                writeEdge(writer, e);
            }
            writer.println("}");
        }
        // Renders one edge; a non-empty tag is appended to the target node name.
        void writeEdge(PrintWriter writer, Edge e) {
            writer.format(" %-50s -> \"%s\"%s;%n",
                String.format("\"%s\"", e.from.toString()),
                e.tag.isEmpty() ? e.to
                : String.format("%s (%s)", e.to, e.tag),
                getLabel(e));
        }
        // Returns the canonical Edge for (origin, target, tag), creating it on
        // demand. Repeated calls for the same edge hit the curEdge cache; otherwise
        // the already-stored instance is returned so labels keep accumulating on a
        // single object.
        Edge addEdge(T origin, T target, String tag) {
            Edge e = new Edge(origin, target, tag);
            if (e.equals(curEdge)) {
                return curEdge;
            }
            if (edges.contains(e)) {
                for (Edge e1 : edges) {
                    if (e.equals(e1)) {
                        curEdge = e1;
                    }
                }
            } else {
                edges.add(e);
                curEdge = e;
            }
            return curEdge;
        }
        // Linear scan for an edge matching from/to, ignoring the tag; null if absent.
        Edge findEdge(T origin, T target) {
            for (Edge e : edges) {
                if (e.from.equals(origin) && e.to.equals(target)) {
                    return e;
                }
            }
            return null;
        }
        // Formats the accumulated label as a graphviz attribute list ("" when empty).
        String getLabel(Edge e) {
            String label = e.label.toString();
            return label.isEmpty() ? "" : String.format("[label=\"%s\",fontsize=9]", label);
        }
        // One graph edge. Equality and hashing consider from/to/tag only; the
        // mutable label is deliberately excluded.
        class Edge {
            final T from;
            final T to;
            final String tag; // optional tag
            final StringBuilder label = new StringBuilder();
            Edge(T from, T to, String tag) {
                this.from = from;
                this.to = to;
                this.tag = tag;
            }
            void addLabel(String s) {
                // "\\n" is a literal backslash-n: a graphviz line break, not a newline
                label.append(s).append("\\n");
            }
            void addLabel(String origin, String target, String profile) {
                label.append(origin).append(" -> ").append(target);
                if (!profile.isEmpty()) {
                    label.append(" (" + profile + ")");
                }
                label.append("\\n");
            }
            @Override @SuppressWarnings("unchecked")
            public boolean equals(Object o) {
                if (o instanceof DotGraph<?>.Edge) {
                    DotGraph<?>.Edge e = (DotGraph<?>.Edge)o;
                    return this.from.equals(e.from) &&
                        this.to.equals(e.to) &&
                        this.tag.equals(e.tag);
                }
                return false;
            }
            @Override
            public int hashCode() {
                int hash = 7;
                hash = 67 * hash + Objects.hashCode(this.from) +
                    Objects.hashCode(this.to) + Objects.hashCode(this.tag);
                return hash;
            }
        }
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.search;
import static org.hamcrest.Matchers.instanceOf;
import java.io.IOException;
import java.util.Random;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.store.Directory;
import org.apache.lucene.tests.analysis.MockAnalyzer;
import org.apache.lucene.tests.index.RandomIndexWriter;
import org.apache.lucene.tests.search.DummyTotalHitCountCollector;
import org.apache.lucene.tests.search.QueryUtils;
import org.apache.lucene.tests.util.LuceneTestCase;
import org.apache.lucene.tests.util.TestUtil;
import org.hamcrest.MatcherAssert;
@LuceneTestCase.SuppressCodecs(value = "SimpleText")
public class TestIndexSortSortedNumericDocValuesRangeQuery extends LuceneTestCase {
  /**
   * Randomized: the index-sort DV range query must return exactly the same hits
   * as an equivalent {@code LongPoint} range query, across random sorts
   * (forward/reverse, with and without a missing value) and random deletions.
   */
  public void testSameHitsAsPointRangeQuery() throws IOException {
    final int iters = atLeast(10);
    for (int iter = 0; iter < iters; ++iter) {
      Directory dir = newDirectory();
      IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
      boolean reverse = random().nextBoolean();
      SortField sortField = new SortedNumericSortField("dv", SortField.Type.LONG, reverse);
      boolean enableMissingValue = random().nextBoolean();
      if (enableMissingValue) {
        // Missing value is either in-range or an extreme sentinel.
        long missingValue =
            random().nextBoolean()
                ? TestUtil.nextLong(random(), -100, 10000)
                : (random().nextBoolean() ? Long.MIN_VALUE : Long.MAX_VALUE);
        sortField.setMissingValue(missingValue);
      }
      iwc.setIndexSort(new Sort(sortField));
      RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
      final int numDocs = atLeast(100);
      for (int i = 0; i < numDocs; ++i) {
        Document doc = new Document();
        // 0 or 1 values per doc: some docs have no value for the field at all.
        final int numValues = TestUtil.nextInt(random(), 0, 1);
        for (int j = 0; j < numValues; ++j) {
          final long value = TestUtil.nextLong(random(), -100, 10000);
          doc.add(new SortedNumericDocValuesField("dv", value));
          doc.add(new LongPoint("idx", value));
        }
        iw.addDocument(doc);
      }
      if (random().nextBoolean()) {
        iw.deleteDocuments(LongPoint.newRangeQuery("idx", 0L, 10L));
      }
      final IndexReader reader = iw.getReader();
      final IndexSearcher searcher = newSearcher(reader);
      iw.close();
      for (int i = 0; i < 100; ++i) {
        final long min =
            random().nextBoolean() ? Long.MIN_VALUE : TestUtil.nextLong(random(), -100, 10000);
        final long max =
            random().nextBoolean() ? Long.MAX_VALUE : TestUtil.nextLong(random(), -100, 10000);
        final Query q1 = LongPoint.newRangeQuery("idx", min, max);
        final Query q2 = createQuery("dv", min, max);
        assertSameHits(searcher, q1, q2, false);
      }
      reader.close();
      dir.close();
    }
  }
  /**
   * Same randomized comparison as {@link #testSameHitsAsPointRangeQuery()}, but
   * with an INT-typed index sort and {@code IntPoint} as the reference query.
   */
  public void testSameHitsAsPointRangeQueryIntSort() throws IOException {
    final int iters = atLeast(10);
    for (int iter = 0; iter < iters; ++iter) {
      Directory dir = newDirectory();
      IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
      boolean reverse = random().nextBoolean();
      SortField sortField = new SortedNumericSortField("dv", SortField.Type.INT, reverse);
      boolean enableMissingValue = random().nextBoolean();
      if (enableMissingValue) {
        // Missing value is either in-range or an extreme sentinel.
        int missingValue =
            random().nextBoolean()
                ? TestUtil.nextInt(random(), -100, 10000)
                : (random().nextBoolean() ? Integer.MIN_VALUE : Integer.MAX_VALUE);
        sortField.setMissingValue(missingValue);
      }
      iwc.setIndexSort(new Sort(sortField));
      RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
      final int numDocs = atLeast(100);
      for (int i = 0; i < numDocs; ++i) {
        Document doc = new Document();
        // 0 or 1 values per doc: some docs have no value for the field at all.
        final int numValues = TestUtil.nextInt(random(), 0, 1);
        for (int j = 0; j < numValues; ++j) {
          final int value = TestUtil.nextInt(random(), -100, 10000);
          doc.add(new SortedNumericDocValuesField("dv", value));
          doc.add(new IntPoint("idx", value));
        }
        iw.addDocument(doc);
      }
      if (random().nextBoolean()) {
        iw.deleteDocuments(IntPoint.newRangeQuery("idx", 0, 10));
      }
      final IndexReader reader = iw.getReader();
      final IndexSearcher searcher = newSearcher(reader);
      iw.close();
      for (int i = 0; i < 100; ++i) {
        final int min =
            random().nextBoolean() ? Integer.MIN_VALUE : TestUtil.nextInt(random(), -100, 10000);
        final int max =
            random().nextBoolean() ? Integer.MAX_VALUE : TestUtil.nextInt(random(), -100, 10000);
        final Query q1 = IntPoint.newRangeQuery("idx", min, max);
        final Query q2 = createQuery("dv", min, max);
        assertSameHits(searcher, q1, q2, false);
      }
      reader.close();
      dir.close();
    }
  }
private static void assertSameHits(IndexSearcher searcher, Query q1, Query q2, boolean scores)
throws IOException {
final int maxDoc = searcher.getIndexReader().maxDoc();
final TopDocs td1 = searcher.search(q1, maxDoc, scores ? Sort.RELEVANCE : Sort.INDEXORDER);
final TopDocs td2 = searcher.search(q2, maxDoc, scores ? Sort.RELEVANCE : Sort.INDEXORDER);
assertEquals(td1.totalHits.value(), td2.totalHits.value());
for (int i = 0; i < td1.scoreDocs.length; ++i) {
assertEquals(td1.scoreDocs[i].doc, td2.scoreDocs[i].doc);
if (scores) {
assertEquals(td1.scoreDocs[i].score, td2.scoreDocs[i].score, 10e-7);
}
}
}
public void testEquals() {
Query q1 = createQuery("foo", 3, 5);
QueryUtils.checkEqual(q1, createQuery("foo", 3, 5));
QueryUtils.checkUnequal(q1, createQuery("foo", 3, 6));
QueryUtils.checkUnequal(q1, createQuery("foo", 4, 5));
QueryUtils.checkUnequal(q1, createQuery("bar", 3, 5));
}
  /** toString: the field prefix is omitted only when the query's own field is passed in. */
  public void testToString() {
    Query q1 = createQuery("foo", 3, 5);
    assertEquals("foo:[3 TO 5]", q1.toString());
    assertEquals("[3 TO 5]", q1.toString("foo"));
    // "bar" is not this query's field, so the "foo:" prefix is kept.
    assertEquals("foo:[3 TO 5]", q1.toString("bar"));
  }
public void testIndexSortDocValuesWithEvenLength() throws Exception {
for (SortField.Type type : new SortField.Type[] {SortField.Type.INT, SortField.Type.LONG}) {
testIndexSortDocValuesWithEvenLength(true, type);
testIndexSortDocValuesWithEvenLength(false, type);
}
}
  /**
   * Boundary checks on a six-document index (even count) sorted on the query
   * field: single-value ranges, range edges, duplicate values, and ranges that
   * span every document.
   */
  public void testIndexSortDocValuesWithEvenLength(boolean reverse, SortField.Type type)
      throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
    Sort indexSort = new Sort(new SortedNumericSortField("field", type, reverse));
    iwc.setIndexSort(indexSort);
    RandomIndexWriter writer = new RandomIndexWriter(random(), dir, iwc);
    // Dataset: -80, -5, 0, 0, 30, 35 (0 appears twice).
    writer.addDocument(createDocument("field", -80));
    writer.addDocument(createDocument("field", -5));
    writer.addDocument(createDocument("field", 0));
    writer.addDocument(createDocument("field", 0));
    writer.addDocument(createDocument("field", 30));
    writer.addDocument(createDocument("field", 35));
    DirectoryReader reader = writer.getReader();
    IndexSearcher searcher = newSearcher(reader);
    // Test ranges consisting of one value.
    assertNumberOfHits(searcher, createQuery("field", -80, -80), 1);
    assertNumberOfHits(searcher, createQuery("field", -5, -5), 1);
    assertNumberOfHits(searcher, createQuery("field", 0, 0), 2);
    assertNumberOfHits(searcher, createQuery("field", 30, 30), 1);
    assertNumberOfHits(searcher, createQuery("field", 35, 35), 1);
    assertNumberOfHits(searcher, createQuery("field", -90, -90), 0);
    assertNumberOfHits(searcher, createQuery("field", 5, 5), 0);
    assertNumberOfHits(searcher, createQuery("field", 40, 40), 0);
    // Test the lower end of the document value range.
    assertNumberOfHits(searcher, createQuery("field", -90, -4), 2);
    assertNumberOfHits(searcher, createQuery("field", -80, -4), 2);
    assertNumberOfHits(searcher, createQuery("field", -70, -4), 1);
    assertNumberOfHits(searcher, createQuery("field", -80, -5), 2);
    // Test the upper end of the document value range.
    assertNumberOfHits(searcher, createQuery("field", 25, 34), 1);
    assertNumberOfHits(searcher, createQuery("field", 25, 35), 2);
    assertNumberOfHits(searcher, createQuery("field", 25, 36), 2);
    assertNumberOfHits(searcher, createQuery("field", 30, 35), 2);
    // Test multiple occurrences of the same value.
    assertNumberOfHits(searcher, createQuery("field", -4, 4), 2);
    assertNumberOfHits(searcher, createQuery("field", -4, 0), 2);
    assertNumberOfHits(searcher, createQuery("field", 0, 4), 2);
    assertNumberOfHits(searcher, createQuery("field", 0, 30), 3);
    // Test ranges that span all documents.
    assertNumberOfHits(searcher, createQuery("field", -80, 35), 6);
    assertNumberOfHits(searcher, createQuery("field", -90, 40), 6);
    writer.close();
    reader.close();
    dir.close();
  }
private static void assertNumberOfHits(IndexSearcher searcher, Query query, int numberOfHits)
throws IOException {
assertEquals(
numberOfHits,
searcher.search(query, DummyTotalHitCountCollector.createManager()).intValue());
assertEquals(numberOfHits, searcher.count(query));
}
public void testIndexSortDocValuesWithOddLength() throws Exception {
testIndexSortDocValuesWithOddLength(false);
testIndexSortDocValuesWithOddLength(true);
}
  /**
   * Boundary checks on a seven-document index (odd count) sorted on the query
   * field: single-value ranges, range edges, duplicate values, and ranges that
   * span every document.
   */
  public void testIndexSortDocValuesWithOddLength(boolean reverse) throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
    Sort indexSort = new Sort(new SortedNumericSortField("field", SortField.Type.LONG, reverse));
    iwc.setIndexSort(indexSort);
    RandomIndexWriter writer = new RandomIndexWriter(random(), dir, iwc);
    // Dataset: -80, -5, 0, 0, 5, 30, 35 (0 appears twice).
    writer.addDocument(createDocument("field", -80));
    writer.addDocument(createDocument("field", -5));
    writer.addDocument(createDocument("field", 0));
    writer.addDocument(createDocument("field", 0));
    writer.addDocument(createDocument("field", 5));
    writer.addDocument(createDocument("field", 30));
    writer.addDocument(createDocument("field", 35));
    DirectoryReader reader = writer.getReader();
    IndexSearcher searcher = newSearcher(reader);
    // Test ranges consisting of one value.
    assertNumberOfHits(searcher, createQuery("field", -80, -80), 1);
    assertNumberOfHits(searcher, createQuery("field", -5, -5), 1);
    assertNumberOfHits(searcher, createQuery("field", 0, 0), 2);
    assertNumberOfHits(searcher, createQuery("field", 5, 5), 1);
    assertNumberOfHits(searcher, createQuery("field", 30, 30), 1);
    assertNumberOfHits(searcher, createQuery("field", 35, 35), 1);
    assertNumberOfHits(searcher, createQuery("field", -90, -90), 0);
    assertNumberOfHits(searcher, createQuery("field", 6, 6), 0);
    assertNumberOfHits(searcher, createQuery("field", 40, 40), 0);
    // Test the lower end of the document value range.
    assertNumberOfHits(searcher, createQuery("field", -90, -4), 2);
    assertNumberOfHits(searcher, createQuery("field", -80, -4), 2);
    assertNumberOfHits(searcher, createQuery("field", -70, -4), 1);
    assertNumberOfHits(searcher, createQuery("field", -80, -5), 2);
    // Test the upper end of the document value range.
    assertNumberOfHits(searcher, createQuery("field", 25, 34), 1);
    assertNumberOfHits(searcher, createQuery("field", 25, 35), 2);
    assertNumberOfHits(searcher, createQuery("field", 25, 36), 2);
    assertNumberOfHits(searcher, createQuery("field", 30, 35), 2);
    // Test multiple occurrences of the same value.
    assertNumberOfHits(searcher, createQuery("field", -4, 4), 2);
    assertNumberOfHits(searcher, createQuery("field", -4, 0), 2);
    assertNumberOfHits(searcher, createQuery("field", 0, 4), 2);
    assertNumberOfHits(searcher, createQuery("field", 0, 30), 4);
    // Test ranges that span all documents.
    assertNumberOfHits(searcher, createQuery("field", -80, 35), 7);
    assertNumberOfHits(searcher, createQuery("field", -90, 40), 7);
    writer.close();
    reader.close();
    dir.close();
  }
public void testIndexSortDocValuesWithSingleValue() throws Exception {
testIndexSortDocValuesWithSingleValue(false);
testIndexSortDocValuesWithSingleValue(true);
}
  /** Edge case: an index containing exactly one document with one value (42). */
  private void testIndexSortDocValuesWithSingleValue(boolean reverse) throws IOException {
    Directory dir = newDirectory();
    IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
    Sort indexSort = new Sort(new SortedNumericSortField("field", SortField.Type.LONG, reverse));
    iwc.setIndexSort(indexSort);
    RandomIndexWriter writer = new RandomIndexWriter(random(), dir, iwc);
    writer.addDocument(createDocument("field", 42));
    DirectoryReader reader = writer.getReader();
    IndexSearcher searcher = newSearcher(reader);
    // Ranges including/excluding the single value on either side.
    assertNumberOfHits(searcher, createQuery("field", 42, 43), 1);
    assertNumberOfHits(searcher, createQuery("field", 42, 42), 1);
    assertNumberOfHits(searcher, createQuery("field", 41, 41), 0);
    assertNumberOfHits(searcher, createQuery("field", 43, 43), 0);
    writer.close();
    reader.close();
    dir.close();
  }
  /**
   * Documents indexed without the sort field take the configured missing value
   * for sorting, but must never match a range query on that field.
   */
  public void testIndexSortMissingValues() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
    SortField sortField = new SortedNumericSortField("field", SortField.Type.LONG);
    sortField.setMissingValue(random().nextLong());
    iwc.setIndexSort(new Sort(sortField));
    RandomIndexWriter writer = new RandomIndexWriter(random(), dir, iwc);
    writer.addDocument(createDocument("field", -80));
    writer.addDocument(createDocument("field", -5));
    writer.addDocument(createDocument("field", 0));
    writer.addDocument(createDocument("field", 35));
    // These docs have no "field" value and must not be counted below.
    writer.addDocument(createDocument("other-field", 0));
    writer.addDocument(createDocument("other-field", 10));
    writer.addDocument(createDocument("other-field", 20));
    DirectoryReader reader = writer.getReader();
    IndexSearcher searcher = newSearcher(reader);
    assertNumberOfHits(searcher, createQuery("field", -70, 0), 2);
    assertNumberOfHits(searcher, createQuery("field", -2, 35), 2);
    assertNumberOfHits(searcher, createQuery("field", -80, 35), 4);
    assertNumberOfHits(searcher, createQuery("field", Long.MIN_VALUE, Long.MAX_VALUE), 4);
    writer.close();
    reader.close();
    dir.close();
  }
  /** A single empty document: the weight must produce no scorer for the field. */
  public void testNoDocuments() throws IOException {
    Directory dir = newDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
    writer.addDocument(new Document());
    IndexReader reader = writer.getReader();
    IndexSearcher searcher = newSearcher(reader);
    Query query = createQuery("foo", 2, 4);
    Weight w = searcher.createWeight(searcher.rewrite(query), ScoreMode.COMPLETE, 1);
    // No document carries "foo", so the leaf has no scorer for this query.
    assertNull(w.scorer(searcher.getIndexReader().leaves().get(0)));
    writer.close();
    reader.close();
    dir.close();
  }
  /** A range spanning all longs must rewrite to a {@code FieldExistsQuery}. */
  public void testRewriteExhaustiveRange() throws IOException {
    Directory dir = newDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
    writer.addDocument(new Document());
    IndexReader reader = writer.getReader();
    Query query = createQuery("field", Long.MIN_VALUE, Long.MAX_VALUE);
    Query rewrittenQuery = query.rewrite(newSearcher(reader));
    assertEquals(new FieldExistsQuery("field"), rewrittenQuery);
    writer.close();
    reader.close();
    dir.close();
  }
  /** Rewriting the range query must also rewrite the wrapped fallback query. */
  public void testRewriteFallbackQuery() throws IOException {
    Directory dir = newDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
    writer.addDocument(new Document());
    IndexReader reader = writer.getReader();
    // Create an (unrealistic) fallback query that is sure to be rewritten.
    Query fallbackQuery = new BooleanQuery.Builder().build();
    Query query = new IndexSortSortedNumericDocValuesRangeQuery("field", 1, 42, fallbackQuery);
    Query rewrittenQuery = query.rewrite(newSearcher(reader));
    assertNotEquals(query, rewrittenQuery);
    MatcherAssert.assertThat(
        rewrittenQuery, instanceOf(IndexSortSortedNumericDocValuesRangeQuery.class));
    IndexSortSortedNumericDocValuesRangeQuery rangeQuery =
        (IndexSortSortedNumericDocValuesRangeQuery) rewrittenQuery;
    // The empty BooleanQuery rewrites to MatchNoDocsQuery inside the wrapper.
    assertEquals(new MatchNoDocsQuery(), rangeQuery.getFallbackQuery());
    writer.close();
    reader.close();
    dir.close();
  }
  /** Test that the index sort optimization not activated if there is no index sort. */
  public void testNoIndexSort() throws Exception {
    Directory dir = newDirectory();
    // No index sort configured on this writer.
    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
    writer.addDocument(createDocument("field", 0));
    testIndexSortOptimizationDeactivated(writer);
    writer.close();
    dir.close();
  }
  /** Test that the index sort optimization is not activated when the sort is on the wrong field. */
  public void testIndexSortOnWrongField() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
    // Sort on "other-field", while the query below targets "field".
    Sort indexSort = new Sort(new SortedNumericSortField("other-field", SortField.Type.LONG));
    iwc.setIndexSort(indexSort);
    RandomIndexWriter writer = new RandomIndexWriter(random(), dir, iwc);
    writer.addDocument(createDocument("field", 0));
    testIndexSortOptimizationDeactivated(writer);
    writer.close();
    dir.close();
  }
  /** FLOAT and DOUBLE index sorts are not eligible for the optimization. */
  public void testOtherSortTypes() throws Exception {
    for (SortField.Type type : new SortField.Type[] {SortField.Type.FLOAT, SortField.Type.DOUBLE}) {
      Directory dir = newDirectory();
      IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
      Sort indexSort = new Sort(new SortedNumericSortField("field", type));
      iwc.setIndexSort(indexSort);
      RandomIndexWriter writer = new RandomIndexWriter(random(), dir, iwc);
      writer.addDocument(createDocument("field", 0));
      testIndexSortOptimizationDeactivated(writer);
      writer.close();
      dir.close();
    }
  }
  /**
   * Test that the index sort optimization is not activated when some documents have multiple
   * values.
   */
  public void testMultiDocValues() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
    Sort indexSort = new Sort(new SortedNumericSortField("field", SortField.Type.LONG));
    iwc.setIndexSort(indexSort);
    RandomIndexWriter writer = new RandomIndexWriter(random(), dir, iwc);
    // One doc with two values for the sort field disables the optimization.
    Document doc = new Document();
    doc.add(new SortedNumericDocValuesField("field", 0));
    doc.add(new SortedNumericDocValuesField("field", 10));
    writer.addDocument(doc);
    testIndexSortOptimizationDeactivated(writer);
    writer.close();
    dir.close();
  }
public void testIndexSortOptimizationDeactivated(RandomIndexWriter writer) throws IOException {
DirectoryReader reader = writer.getReader();
IndexSearcher searcher = newSearcher(reader);
Query query = createQuery("field", 0, 0);
Weight weight = query.createWeight(searcher, ScoreMode.TOP_SCORES, 1.0F);
// Check that the two-phase iterator is not null, indicating that we've fallen
// back to SortedNumericDocValuesField.newSlowRangeQuery.
for (LeafReaderContext context : searcher.getIndexReader().leaves()) {
Scorer scorer = weight.scorer(context);
assertNotNull(scorer.twoPhaseIterator());
}
reader.close();
}
  /**
   * When the index is not sorted on the queried field, {@code Weight#count}
   * must delegate to the fallback query's count.
   */
  public void testFallbackCount() throws IOException {
    Directory dir = newDirectory();
    IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
    Sort indexSort = new Sort(new SortedNumericSortField("field", SortField.Type.LONG));
    iwc.setIndexSort(indexSort);
    RandomIndexWriter writer = new RandomIndexWriter(random(), dir, iwc);
    Document doc = new Document();
    doc.add(new SortedNumericDocValuesField("field", 10));
    writer.addDocument(doc);
    IndexReader reader = writer.getReader();
    IndexSearcher searcher = newSearcher(reader);
    // we use an unrealistic query that exposes its own Weight#count
    Query fallbackQuery = new MatchNoDocsQuery();
    // the index is not sorted on this field, the fallback query is used
    Query query = new IndexSortSortedNumericDocValuesRangeQuery("another", 1, 42, fallbackQuery);
    Weight weight = query.createWeight(searcher, ScoreMode.COMPLETE, 1.0f);
    for (LeafReaderContext context : searcher.getLeafContexts()) {
      assertEquals(0, weight.count(context));
    }
    writer.close();
    reader.close();
    dir.close();
  }
  /**
   * Randomized: per-leaf {@code Weight#count} of the DV range query must equal
   * the count of an equivalent {@code LongPoint} range query.
   */
  public void testCompareCount() throws IOException {
    final int iters = atLeast(10);
    for (int iter = 0; iter < iters; ++iter) {
      Directory dir = newDirectory();
      IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
      SortField sortField = new SortedNumericSortField("field", SortField.Type.LONG);
      boolean enableMissingValue = random().nextBoolean();
      if (enableMissingValue) {
        long missingValue =
            random().nextBoolean()
                ? TestUtil.nextLong(random(), -100, 10000)
                : (random().nextBoolean() ? Long.MIN_VALUE : Long.MAX_VALUE);
        sortField.setMissingValue(missingValue);
      }
      iwc.setIndexSort(new Sort(sortField));
      RandomIndexWriter writer = new RandomIndexWriter(random(), dir, iwc);
      final int numDocs = atLeast(100);
      for (int i = 0; i < numDocs; ++i) {
        Document doc = new Document();
        final int numValues = TestUtil.nextInt(random(), 0, 1);
        for (int j = 0; j < numValues; ++j) {
          final long value = TestUtil.nextLong(random(), -100, 10000);
          // Replaces the empty doc with a fresh one; numValues is at most 1.
          doc = createSNDVAndPointDocument("field", value);
        }
        writer.addDocument(doc);
      }
      if (random().nextBoolean()) {
        writer.deleteDocuments(LongPoint.newRangeQuery("field", 0L, 10L));
      }
      final IndexReader reader = writer.getReader();
      final IndexSearcher searcher = newSearcher(reader);
      writer.close();
      for (int i = 0; i < 100; ++i) {
        final long min =
            random().nextBoolean() ? Long.MIN_VALUE : TestUtil.nextLong(random(), -100, 10000);
        final long max =
            random().nextBoolean() ? Long.MAX_VALUE : TestUtil.nextLong(random(), -100, 10000);
        final Query q1 = LongPoint.newRangeQuery("field", min, max);
        final Query fallbackQuery = LongPoint.newRangeQuery("field", min, max);
        final Query q2 =
            new IndexSortSortedNumericDocValuesRangeQuery("field", min, max, fallbackQuery);
        final Weight weight1 = q1.createWeight(searcher, ScoreMode.COMPLETE, 1.0f);
        final Weight weight2 = q2.createWeight(searcher, ScoreMode.COMPLETE, 1.0f);
        assertSameCount(weight1, weight2, searcher);
      }
      reader.close();
      dir.close();
    }
  }
private void assertSameCount(Weight weight1, Weight weight2, IndexSearcher searcher)
throws IOException {
for (LeafReaderContext context : searcher.getLeafContexts()) {
assertEquals(weight1.count(context), weight2.count(context));
}
}
  /**
   * Docs without the field sort to one end via the missing value (set to a range
   * boundary); the count must still include only the two real documents.
   */
  public void testCountBoundary() throws IOException {
    Directory dir = newDirectory();
    IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
    SortField sortField = new SortedNumericSortField("field", SortField.Type.LONG);
    boolean useLower = random().nextBoolean();
    long lowerValue = 1;
    long upperValue = 100;
    // Missing value coincides with one of the query bounds.
    sortField.setMissingValue(useLower ? lowerValue : upperValue);
    Sort indexSort = new Sort(sortField);
    iwc.setIndexSort(indexSort);
    RandomIndexWriter writer = new RandomIndexWriter(random(), dir, iwc);
    writer.addDocument(
        createSNDVAndPointDocument("field", random().nextLong(lowerValue, upperValue)));
    writer.addDocument(
        createSNDVAndPointDocument("field", random().nextLong(lowerValue, upperValue)));
    // missingValue
    writer.addDocument(createMissingValueDocument());
    IndexReader reader = writer.getReader();
    IndexSearcher searcher = newSearcher(reader);
    Query fallbackQuery = LongPoint.newRangeQuery("field", lowerValue, upperValue);
    Query query =
        new IndexSortSortedNumericDocValuesRangeQuery(
            "field", lowerValue, upperValue, fallbackQuery);
    Weight weight = query.createWeight(searcher, ScoreMode.COMPLETE, 1.0f);
    int count = 0;
    for (LeafReaderContext context : searcher.getLeafContexts()) {
      count += weight.count(context);
    }
    // Only the two docs that actually carry the field may be counted.
    assertEquals(2, count);
    writer.close();
    reader.close();
    dir.close();
  }
/** Builds a document that intentionally has no numeric "field", only an unrelated keyword. */
private Document createMissingValueDocument() {
  Document document = new Document();
  document.add(new StringField("foo", "fox", Field.Store.YES));
  return document;
}
/** Builds a document carrying {@code value} both as sorted-numeric doc values and as a point. */
private Document createSNDVAndPointDocument(String field, long value) {
  Document document = new Document();
  document.add(new SortedNumericDocValuesField(field, value));
  document.add(new LongPoint(field, value));
  return document;
}
/** Builds a document carrying {@code value} as sorted-numeric doc values only (no point). */
private Document createDocument(String field, long value) {
  Document document = new Document();
  document.add(new SortedNumericDocValuesField(field, value));
  return document;
}
/** Wraps a slow doc-values range query as the fallback of the index-sort optimized query. */
private Query createQuery(String field, long lowerValue, long upperValue) {
  final Query slowFallback =
      SortedNumericDocValuesField.newSlowRangeQuery(field, lowerValue, upperValue);
  return new IndexSortSortedNumericDocValuesRangeQuery(
      field, lowerValue, upperValue, slowFallback);
}
/** Runs the BKD count scenarios with an ascending index sort. */
public void testCountWithBkdAsc() throws Exception {
  doTestCountWithBkd(false);
}
/** Runs the BKD count scenarios with a descending index sort. */
public void testCountWithBkdDesc() throws Exception {
  doTestCountWithBkd(true);
}
/**
 * Indexes a fixed set of repeated values (5, 7, 9, 11, 13) sorted by {@code field} and checks
 * that the index-sort count optimization returns exact counts for ranges whose bounds hit,
 * miss, or bracket the indexed values.
 *
 * <p>Fixes two issues in the original: the local was misspelled {@code filedName}, and the
 * "bounds equal to min/max values of the dataset" case duplicated the previous "everything
 * matches" case (2..14) instead of actually using the dataset min/max (5..13).
 *
 * @param reverse whether the index sort is descending
 */
public void doTestCountWithBkd(boolean reverse) throws Exception {
  String fieldName = "field";
  Directory dir = newDirectory();
  IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
  Sort indexSort = new Sort(new SortedNumericSortField(fieldName, SortField.Type.LONG, reverse));
  iwc.setIndexSort(indexSort);
  RandomIndexWriter writer = new RandomIndexWriter(random(), dir, iwc);
  // value -> number of documents carrying it
  addDocWithBkd(writer, fieldName, 7, 500);
  addDocWithBkd(writer, fieldName, 5, 600);
  addDocWithBkd(writer, fieldName, 11, 700);
  addDocWithBkd(writer, fieldName, 13, 800);
  addDocWithBkd(writer, fieldName, 9, 900);
  writer.flush();
  writer.forceMerge(1);
  IndexReader reader = writer.getReader();
  IndexSearcher searcher = newSearcher(reader);
  // Both bounds exist in the dataset
  assertBkdCount(searcher, fieldName, 7, 9, 1400);
  // Both bounds do not exist in the dataset
  assertBkdCount(searcher, fieldName, 6, 10, 1400);
  // Min bound exists in the dataset, not the max
  assertBkdCount(searcher, fieldName, 7, 10, 1400);
  // Min bound doesn't exist in the dataset, max does
  assertBkdCount(searcher, fieldName, 6, 9, 1400);
  // Min bound is the min value of the dataset
  assertBkdCount(searcher, fieldName, 5, 8, 1100);
  // Min bound is less than min value of the dataset
  assertBkdCount(searcher, fieldName, 4, 8, 1100);
  // Max bound is the max value of the dataset
  assertBkdCount(searcher, fieldName, 10, 13, 1500);
  // Max bound is greater than max value of the dataset
  assertBkdCount(searcher, fieldName, 10, 14, 1500);
  // Everything matches
  assertBkdCount(searcher, fieldName, 2, 14, 3500);
  // Bounds equal to min/max values of the dataset, everything matches
  // (previously a verbatim duplicate of the 2..14 case above)
  assertBkdCount(searcher, fieldName, 5, 13, 3500);
  // Bounds are less than the min value of the dataset
  assertBkdCount(searcher, fieldName, 2, 3, 0);
  // Bounds are greater than the max value of the dataset
  assertBkdCount(searcher, fieldName, 14, 15, 0);
  writer.close();
  reader.close();
  dir.close();
}

/** Builds the range query for {@code [lower, upper]} and asserts its count on every leaf. */
private void assertBkdCount(
    IndexSearcher searcher, String field, long lower, long upper, int expectedCount)
    throws IOException {
  Query fallbackQuery = LongPoint.newRangeQuery(field, lower, upper);
  Query query = new IndexSortSortedNumericDocValuesRangeQuery(field, lower, upper, fallbackQuery);
  Weight weight = query.createWeight(searcher, ScoreMode.COMPLETE, 1.0f);
  for (LeafReaderContext context : searcher.getLeafContexts()) {
    assertEquals(expectedCount, weight.count(context));
  }
}
/** Runs the randomized BKD count comparison with an ascending index sort. */
public void testRandomCountWithBkdAsc() throws Exception {
  doTestRandomCountWithBkd(false);
}
/** Runs the randomized BKD count comparison with a descending index sort. */
public void testRandomCountWithBkdDesc() throws Exception {
  doTestRandomCountWithBkd(true);
}
/**
 * Randomized check: for arbitrary data and arbitrary bounds, the index-sort optimized range
 * query must report the same per-leaf count as the plain points-based range query it wraps.
 */
private void doTestRandomCountWithBkd(boolean reverse) throws Exception {
  final String fieldName = "field";
  Directory dir = newDirectory();
  IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
  iwc.setIndexSort(new Sort(new SortedNumericSortField(fieldName, SortField.Type.LONG, reverse)));
  RandomIndexWriter writer = new RandomIndexWriter(random(), dir, iwc);
  Random random = random();
  for (int doc = 0; doc < 100; doc++) {
    addDocWithBkd(writer, fieldName, random.nextInt(1000), random.nextInt(1000));
  }
  writer.flush();
  writer.forceMerge(1);
  IndexReader reader = writer.getReader();
  IndexSearcher searcher = newSearcher(reader);
  for (int iter = 0; iter < 100; iter++) {
    int bound1 = random.nextInt(1100);
    int bound2 = random.nextInt(1100);
    int lower = Math.min(bound1, bound2);
    int upper = Math.max(bound1, bound2);
    Query baseline = LongPoint.newRangeQuery(fieldName, lower, upper);
    Query optimized =
        new IndexSortSortedNumericDocValuesRangeQuery(fieldName, lower, upper, baseline);
    Weight optimizedWeight = optimized.createWeight(searcher, ScoreMode.COMPLETE, 1.0f);
    Weight baselineWeight = baseline.createWeight(searcher, ScoreMode.COMPLETE, 1.0f);
    for (LeafReaderContext leaf : searcher.getLeafContexts()) {
      assertEquals(baselineWeight.count(leaf), optimizedWeight.count(leaf));
    }
  }
  writer.close();
  reader.close();
  dir.close();
}
/** Adds {@code repeat} identical documents, each carrying {@code value} as both DV and point. */
private void addDocWithBkd(RandomIndexWriter indexWriter, String field, long value, int repeat)
    throws IOException {
  for (int added = 0; added < repeat; added++) {
    Document document = new Document();
    document.add(new SortedNumericDocValuesField(field, value));
    document.add(new LongPoint(field, value));
    indexWriter.addDocument(document);
  }
}
}
|
googleapis/google-cloud-java | 34,912 | java-dialogflow-cx/proto-google-cloud-dialogflow-cx-v3beta1/src/main/java/com/google/cloud/dialogflow/cx/v3beta1/ListGeneratorsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/cx/v3beta1/generator.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.cx.v3beta1;
/**
*
*
* <pre>
* The request message for
* [Generators.ListGenerators][google.cloud.dialogflow.cx.v3beta1.Generators.ListGenerators].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest}
*/
public final class ListGeneratorsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest)
ListGeneratorsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListGeneratorsRequest.newBuilder() to construct.
private ListGeneratorsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListGeneratorsRequest() {
parent_ = "";
languageCode_ = "";
pageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListGeneratorsRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.cx.v3beta1.GeneratorProto
.internal_static_google_cloud_dialogflow_cx_v3beta1_ListGeneratorsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.cx.v3beta1.GeneratorProto
.internal_static_google_cloud_dialogflow_cx_v3beta1_ListGeneratorsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest.class,
com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The agent to list all generators for.
* Format: `projects/<ProjectID>/locations/<LocationID>/agents/<AgentID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The agent to list all generators for.
* Format: `projects/<ProjectID>/locations/<LocationID>/agents/<AgentID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int LANGUAGE_CODE_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object languageCode_ = "";
/**
*
*
* <pre>
* The language to list generators for.
* </pre>
*
* <code>string language_code = 2;</code>
*
* @return The languageCode.
*/
@java.lang.Override
public java.lang.String getLanguageCode() {
java.lang.Object ref = languageCode_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
languageCode_ = s;
return s;
}
}
/**
*
*
* <pre>
* The language to list generators for.
* </pre>
*
* <code>string language_code = 2;</code>
*
* @return The bytes for languageCode.
*/
@java.lang.Override
public com.google.protobuf.ByteString getLanguageCodeBytes() {
java.lang.Object ref = languageCode_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
languageCode_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 3;
private int pageSize_ = 0;
/**
*
*
* <pre>
* The maximum number of items to return in a single page. By default 100 and
* at most 1000.
* </pre>
*
* <code>int32 page_size = 3;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* The next_page_token value returned from a previous list request.
* </pre>
*
* <code>string page_token = 4;</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* The next_page_token value returned from a previous list request.
* </pre>
*
* <code>string page_token = 4;</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(languageCode_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, languageCode_);
}
if (pageSize_ != 0) {
output.writeInt32(3, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, pageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(languageCode_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, languageCode_);
}
if (pageSize_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, pageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest)) {
return super.equals(obj);
}
com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest other =
(com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (!getLanguageCode().equals(other.getLanguageCode())) return false;
if (getPageSize() != other.getPageSize()) return false;
if (!getPageToken().equals(other.getPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
hash = (37 * hash) + LANGUAGE_CODE_FIELD_NUMBER;
hash = (53 * hash) + getLanguageCode().hashCode();
hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
hash = (53 * hash) + getPageSize();
hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The request message for
* [Generators.ListGenerators][google.cloud.dialogflow.cx.v3beta1.Generators.ListGenerators].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest)
com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.cx.v3beta1.GeneratorProto
.internal_static_google_cloud_dialogflow_cx_v3beta1_ListGeneratorsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.cx.v3beta1.GeneratorProto
.internal_static_google_cloud_dialogflow_cx_v3beta1_ListGeneratorsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest.class,
com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest.Builder.class);
}
// Construct using com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
languageCode_ = "";
pageSize_ = 0;
pageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.dialogflow.cx.v3beta1.GeneratorProto
.internal_static_google_cloud_dialogflow_cx_v3beta1_ListGeneratorsRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest
getDefaultInstanceForType() {
return com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest build() {
com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest buildPartial() {
com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest result =
new com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.languageCode_ = languageCode_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.pageSize_ = pageSize_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.pageToken_ = pageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest) {
return mergeFrom((com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest other) {
if (other
== com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getLanguageCode().isEmpty()) {
languageCode_ = other.languageCode_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.getPageSize() != 0) {
setPageSize(other.getPageSize());
}
if (!other.getPageToken().isEmpty()) {
pageToken_ = other.pageToken_;
bitField0_ |= 0x00000008;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
languageCode_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 24:
{
pageSize_ = input.readInt32();
bitField0_ |= 0x00000004;
break;
} // case 24
case 34:
{
pageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000008;
break;
} // case 34
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The agent to list all generators for.
* Format: `projects/<ProjectID>/locations/<LocationID>/agents/<AgentID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The agent to list all generators for.
* Format: `projects/<ProjectID>/locations/<LocationID>/agents/<AgentID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The agent to list all generators for.
* Format: `projects/<ProjectID>/locations/<LocationID>/agents/<AgentID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The agent to list all generators for.
* Format: `projects/<ProjectID>/locations/<LocationID>/agents/<AgentID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The agent to list all generators for.
* Format: `projects/<ProjectID>/locations/<LocationID>/agents/<AgentID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
// Field 2 (`string language_code`): the language to list generators for.
// Declared as Object so it can lazily hold either a decoded String or the
// wire-format ByteString; the getters below convert on demand and cache.
private java.lang.Object languageCode_ = "";

/**
 * The language to list generators for.
 *
 * <code>string language_code = 2;</code>
 *
 * @return The languageCode.
 */
public java.lang.String getLanguageCode() {
  java.lang.Object ref = languageCode_;
  if (!(ref instanceof java.lang.String)) {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    languageCode_ = s; // cache the decoded form for subsequent calls
    return s;
  } else {
    return (java.lang.String) ref;
  }
}

/**
 * The language to list generators for.
 *
 * <code>string language_code = 2;</code>
 *
 * @return The bytes for languageCode.
 */
public com.google.protobuf.ByteString getLanguageCodeBytes() {
  java.lang.Object ref = languageCode_;
  if (ref instanceof String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    languageCode_ = b; // cache the encoded form for subsequent calls
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}

/**
 * The language to list generators for.
 *
 * <code>string language_code = 2;</code>
 *
 * @param value The languageCode to set.
 * @return This builder for chaining.
 */
public Builder setLanguageCode(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  languageCode_ = value;
  bitField0_ |= 0x00000002; // bit 1 tracks language_code
  onChanged();
  return this;
}

/**
 * Resets `language_code` to its default (empty string) and clears its
 * has-bit.
 *
 * <code>string language_code = 2;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearLanguageCode() {
  languageCode_ = getDefaultInstance().getLanguageCode();
  bitField0_ = (bitField0_ & ~0x00000002);
  onChanged();
  return this;
}

/**
 * Sets `language_code` from raw bytes; validates UTF-8 first (proto3
 * requirement).
 *
 * <code>string language_code = 2;</code>
 *
 * @param value The bytes for languageCode to set.
 * @return This builder for chaining.
 */
public Builder setLanguageCodeBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  languageCode_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
// Field 3 (`int32 page_size`): maximum number of items per page
// (server default 100, at most 1000, per the proto comment).
private int pageSize_;

/**
 * The maximum number of items to return in a single page. By default 100 and
 * at most 1000.
 *
 * <code>int32 page_size = 3;</code>
 *
 * @return The pageSize.
 */
@java.lang.Override
public int getPageSize() {
  return pageSize_;
}

/**
 * Sets `page_size` and marks bit 0x00000004 as set.
 *
 * <code>int32 page_size = 3;</code>
 *
 * @param value The pageSize to set.
 * @return This builder for chaining.
 */
public Builder setPageSize(int value) {
  pageSize_ = value;
  bitField0_ |= 0x00000004; // bit 2 tracks page_size
  onChanged();
  return this;
}

/**
 * Resets `page_size` to its default (0) and clears its has-bit.
 *
 * <code>int32 page_size = 3;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearPageSize() {
  bitField0_ = (bitField0_ & ~0x00000004);
  pageSize_ = 0;
  onChanged();
  return this;
}
// Field 4 (`string page_token`): the next_page_token from a previous list
// response. Same lazy String/ByteString dual representation as the other
// string fields in this builder.
private java.lang.Object pageToken_ = "";

/**
 * The next_page_token value returned from a previous list request.
 *
 * <code>string page_token = 4;</code>
 *
 * @return The pageToken.
 */
public java.lang.String getPageToken() {
  java.lang.Object ref = pageToken_;
  if (!(ref instanceof java.lang.String)) {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    pageToken_ = s; // cache the decoded form
    return s;
  } else {
    return (java.lang.String) ref;
  }
}

/**
 * The next_page_token value returned from a previous list request.
 *
 * <code>string page_token = 4;</code>
 *
 * @return The bytes for pageToken.
 */
public com.google.protobuf.ByteString getPageTokenBytes() {
  java.lang.Object ref = pageToken_;
  if (ref instanceof String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    pageToken_ = b; // cache the encoded form
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}

/**
 * Sets `page_token` and marks bit 0x00000008 as set.
 *
 * <code>string page_token = 4;</code>
 *
 * @param value The pageToken to set.
 * @return This builder for chaining.
 */
public Builder setPageToken(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  pageToken_ = value;
  bitField0_ |= 0x00000008; // bit 3 tracks page_token
  onChanged();
  return this;
}

/**
 * Resets `page_token` to its default (empty string) and clears its has-bit.
 *
 * <code>string page_token = 4;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearPageToken() {
  pageToken_ = getDefaultInstance().getPageToken();
  bitField0_ = (bitField0_ & ~0x00000008);
  onChanged();
  return this;
}

/**
 * Sets `page_token` from raw bytes; validates UTF-8 first (proto3
 * requirement).
 *
 * <code>string page_token = 4;</code>
 *
 * @param value The bytes for pageToken to set.
 * @return This builder for chaining.
 */
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  pageToken_ = value;
  bitField0_ |= 0x00000008;
  onChanged();
  return this;
}
// Final pass-throughs to the generated superclass; declared final so
// subclassing cannot change how unknown (unrecognized) fields are carried.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest)
// Singleton default instance: the canonical "all fields default" message,
// shared by every getDefaultInstance()/getDefaultInstanceForType() call.
private static final com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest
    DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest();
}

public static com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Wire parser: delegates to a fresh Builder's mergeFrom and always returns a
// partial message; on failure the partially-built message is attached to the
// exception so callers can inspect what parsed before the error.
private static final com.google.protobuf.Parser<ListGeneratorsRequest> PARSER =
    new com.google.protobuf.AbstractParser<ListGeneratorsRequest>() {
      @java.lang.Override
      public ListGeneratorsRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failures so callers see a single protobuf exception type.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<ListGeneratorsRequest> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<ListGeneratorsRequest> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
/*
 * (source: googleapis/google-cloud-java —
 * java-containeranalysis/proto-google-cloud-containeranalysis-v1beta1/src/main/java/io/grafeas/v1beta1/BatchCreateOccurrencesRequest.java)
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/devtools/containeranalysis/v1beta1/grafeas/grafeas.proto
// Protobuf Java Version: 3.25.8
package io.grafeas.v1beta1;
/**
*
*
* <pre>
* Request to create occurrences in batch.
* </pre>
*
* Protobuf type {@code grafeas.v1beta1.BatchCreateOccurrencesRequest}
*/
public final class BatchCreateOccurrencesRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:grafeas.v1beta1.BatchCreateOccurrencesRequest)
BatchCreateOccurrencesRequestOrBuilder {
private static final long serialVersionUID = 0L;

// Use BatchCreateOccurrencesRequest.newBuilder() to construct.
private BatchCreateOccurrencesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// No-arg constructor used only for DEFAULT_INSTANCE / newInstance: seeds
// string fields with "" and repeated fields with an immutable empty list.
private BatchCreateOccurrencesRequest() {
  parent_ = "";
  occurrences_ = java.util.Collections.emptyList();
}

@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new BatchCreateOccurrencesRequest();
}

// Descriptor/reflection plumbing: both lookups resolve against the
// file-level descriptor held by io.grafeas.v1beta1.Grafeas.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return io.grafeas.v1beta1.Grafeas
      .internal_static_grafeas_v1beta1_BatchCreateOccurrencesRequest_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return io.grafeas.v1beta1.Grafeas
      .internal_static_grafeas_v1beta1_BatchCreateOccurrencesRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          io.grafeas.v1beta1.BatchCreateOccurrencesRequest.class,
          io.grafeas.v1beta1.BatchCreateOccurrencesRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;

// Field 1 (`string parent`): project name `projects/[PROJECT_ID]` under which
// the occurrences are created. Object-typed for the lazy String/ByteString
// dual representation; volatile so the cached conversion publishes safely
// across threads.
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";

/**
 * The name of the project in the form of `projects/[PROJECT_ID]`, under which
 * the occurrences are to be created.
 *
 * <code>string parent = 1;</code>
 *
 * @return The parent.
 */
@java.lang.Override
public java.lang.String getParent() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    parent_ = s; // cache the decoded form
    return s;
  }
}

/**
 * The name of the project in the form of `projects/[PROJECT_ID]`, under which
 * the occurrences are to be created.
 *
 * <code>string parent = 1;</code>
 *
 * @return The bytes for parent.
 */
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    parent_ = b; // cache the encoded form
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int OCCURRENCES_FIELD_NUMBER = 2;

// Field 2 (`repeated Occurrence occurrences`): the occurrences to create.
// On the built (immutable) message this list is never mutated, so it is
// exposed directly without copying.
@SuppressWarnings("serial")
private java.util.List<io.grafeas.v1beta1.Occurrence> occurrences_;

/**
 * The occurrences to create.
 *
 * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 2;</code>
 */
@java.lang.Override
public java.util.List<io.grafeas.v1beta1.Occurrence> getOccurrencesList() {
  return occurrences_;
}

/**
 * The occurrences to create, as their OrBuilder view.
 *
 * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 2;</code>
 */
@java.lang.Override
public java.util.List<? extends io.grafeas.v1beta1.OccurrenceOrBuilder>
    getOccurrencesOrBuilderList() {
  return occurrences_;
}

/**
 * Number of occurrences in field 2.
 *
 * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 2;</code>
 */
@java.lang.Override
public int getOccurrencesCount() {
  return occurrences_.size();
}

/**
 * The occurrence at {@code index}.
 *
 * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 2;</code>
 */
@java.lang.Override
public io.grafeas.v1beta1.Occurrence getOccurrences(int index) {
  return occurrences_.get(index);
}

/**
 * The occurrence at {@code index}, as its OrBuilder view.
 *
 * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 2;</code>
 */
@java.lang.Override
public io.grafeas.v1beta1.OccurrenceOrBuilder getOccurrencesOrBuilder(int index) {
  return occurrences_.get(index);
}
// Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
// This message has no required fields, so the check trivially succeeds.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}

// Wire serialization: proto3 skips string fields at their default (empty),
// writes each repeated message with its tag, then any unknown fields.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
  }
  for (int i = 0; i < occurrences_.size(); i++) {
    output.writeMessage(2, occurrences_.get(i));
  }
  getUnknownFields().writeTo(output);
}

// Mirrors writeTo field-for-field; the result is memoized (the message is
// immutable, so the size can never change after first computation).
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
  }
  for (int i = 0; i < occurrences_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, occurrences_.get(i));
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
// Value equality over all declared fields plus the unknown-field set.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof io.grafeas.v1beta1.BatchCreateOccurrencesRequest)) {
    return super.equals(obj);
  }
  io.grafeas.v1beta1.BatchCreateOccurrencesRequest other =
      (io.grafeas.v1beta1.BatchCreateOccurrencesRequest) obj;

  if (!getParent().equals(other.getParent())) return false;
  if (!getOccurrencesList().equals(other.getOccurrencesList())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}

// Hash consistent with equals; memoized (0 is used as the "not yet
// computed" sentinel, per the generated-code convention). Empty repeated
// fields are skipped so an absent list hashes the same across instances.
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + PARENT_FIELD_NUMBER;
  hash = (53 * hash) + getParent().hashCode();
  if (getOccurrencesCount() > 0) {
    hash = (37 * hash) + OCCURRENCES_FIELD_NUMBER;
    hash = (53 * hash) + getOccurrencesList().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard generated parse entry points. The in-memory overloads
// (ByteBuffer / ByteString / byte[]) delegate to PARSER directly; the
// stream-based overloads go through GeneratedMessageV3 helpers that unwrap
// InvalidProtocolBufferException from IOException. Each variant also has a
// form taking an ExtensionRegistryLite for resolving extensions.
public static io.grafeas.v1beta1.BatchCreateOccurrencesRequest parseFrom(java.nio.ByteBuffer data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static io.grafeas.v1beta1.BatchCreateOccurrencesRequest parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static io.grafeas.v1beta1.BatchCreateOccurrencesRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static io.grafeas.v1beta1.BatchCreateOccurrencesRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static io.grafeas.v1beta1.BatchCreateOccurrencesRequest parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static io.grafeas.v1beta1.BatchCreateOccurrencesRequest parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static io.grafeas.v1beta1.BatchCreateOccurrencesRequest parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static io.grafeas.v1beta1.BatchCreateOccurrencesRequest parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Delimited variants read a varint length prefix before the message body.
public static io.grafeas.v1beta1.BatchCreateOccurrencesRequest parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static io.grafeas.v1beta1.BatchCreateOccurrencesRequest parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static io.grafeas.v1beta1.BatchCreateOccurrencesRequest parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static io.grafeas.v1beta1.BatchCreateOccurrencesRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// Builder factories. newBuilder() starts from the default instance; the
// prototype overload pre-populates the builder with an existing message's
// fields. toBuilder() avoids a wasted mergeFrom when called on the shared
// default instance.
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

public static Builder newBuilder(io.grafeas.v1beta1.BatchCreateOccurrencesRequest prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 * Request to create occurrences in batch.
 *
 * <p>Mutable companion of {@code BatchCreateOccurrencesRequest}; builds the
 * immutable message via {@link #build()} / {@link #buildPartial()}.
 *
 * Protobuf type {@code grafeas.v1beta1.BatchCreateOccurrencesRequest}
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:grafeas.v1beta1.BatchCreateOccurrencesRequest)
    io.grafeas.v1beta1.BatchCreateOccurrencesRequestOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return io.grafeas.v1beta1.Grafeas
        .internal_static_grafeas_v1beta1_BatchCreateOccurrencesRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return io.grafeas.v1beta1.Grafeas
        .internal_static_grafeas_v1beta1_BatchCreateOccurrencesRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            io.grafeas.v1beta1.BatchCreateOccurrencesRequest.class,
            io.grafeas.v1beta1.BatchCreateOccurrencesRequest.Builder.class);
  }

  // Construct using io.grafeas.v1beta1.BatchCreateOccurrencesRequest.newBuilder()
  private Builder() {}

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
  }

  // Resets every field to its default and clears all has-bits. The
  // occurrences field has two representations: a plain list (when no nested
  // builders were requested) or a RepeatedFieldBuilderV3; both are reset.
  @java.lang.Override
  public Builder clear() {
    super.clear();
    bitField0_ = 0;
    parent_ = "";
    if (occurrencesBuilder_ == null) {
      occurrences_ = java.util.Collections.emptyList();
    } else {
      occurrences_ = null;
      occurrencesBuilder_.clear();
    }
    bitField0_ = (bitField0_ & ~0x00000002);
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return io.grafeas.v1beta1.Grafeas
        .internal_static_grafeas_v1beta1_BatchCreateOccurrencesRequest_descriptor;
  }

  @java.lang.Override
  public io.grafeas.v1beta1.BatchCreateOccurrencesRequest getDefaultInstanceForType() {
    return io.grafeas.v1beta1.BatchCreateOccurrencesRequest.getDefaultInstance();
  }

  // build() enforces initialization; this message has no required fields,
  // so the isInitialized check cannot fail in practice.
  @java.lang.Override
  public io.grafeas.v1beta1.BatchCreateOccurrencesRequest build() {
    io.grafeas.v1beta1.BatchCreateOccurrencesRequest result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public io.grafeas.v1beta1.BatchCreateOccurrencesRequest buildPartial() {
    io.grafeas.v1beta1.BatchCreateOccurrencesRequest result =
        new io.grafeas.v1beta1.BatchCreateOccurrencesRequest(this);
    buildPartialRepeatedFields(result);
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return this == null ? result : result;
  }

  // Transfers the repeated field: if the plain list is in use it is frozen
  // (wrapped unmodifiable) and handed to the message; otherwise the nested
  // builder produces the final list.
  private void buildPartialRepeatedFields(
      io.grafeas.v1beta1.BatchCreateOccurrencesRequest result) {
    if (occurrencesBuilder_ == null) {
      if (((bitField0_ & 0x00000002) != 0)) {
        occurrences_ = java.util.Collections.unmodifiableList(occurrences_);
        bitField0_ = (bitField0_ & ~0x00000002);
      }
      result.occurrences_ = occurrences_;
    } else {
      result.occurrences_ = occurrencesBuilder_.build();
    }
  }

  // Copies scalar/singular fields whose has-bit is set.
  private void buildPartial0(io.grafeas.v1beta1.BatchCreateOccurrencesRequest result) {
    int from_bitField0_ = bitField0_;
    if (((from_bitField0_ & 0x00000001) != 0)) {
      result.parent_ = parent_;
    }
  }

  // Reflection-based mutators: straight delegations kept so the generated
  // builder has a stable covariant return type (Builder, not the superclass).
  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }
  // Generic merge: routes to the typed overload when possible, otherwise
  // falls back to the reflective superclass merge.
  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof io.grafeas.v1beta1.BatchCreateOccurrencesRequest) {
      return mergeFrom((io.grafeas.v1beta1.BatchCreateOccurrencesRequest) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  // Typed merge: non-default scalars overwrite; repeated fields append.
  // When switching list ownership the other message's (immutable) list may
  // be adopted directly and only copied once a mutation is requested.
  public Builder mergeFrom(io.grafeas.v1beta1.BatchCreateOccurrencesRequest other) {
    if (other == io.grafeas.v1beta1.BatchCreateOccurrencesRequest.getDefaultInstance())
      return this;
    if (!other.getParent().isEmpty()) {
      parent_ = other.parent_;
      bitField0_ |= 0x00000001;
      onChanged();
    }
    if (occurrencesBuilder_ == null) {
      if (!other.occurrences_.isEmpty()) {
        if (occurrences_.isEmpty()) {
          occurrences_ = other.occurrences_;
          bitField0_ = (bitField0_ & ~0x00000002); // borrowed list: not yet mutable
        } else {
          ensureOccurrencesIsMutable();
          occurrences_.addAll(other.occurrences_);
        }
        onChanged();
      }
    } else {
      if (!other.occurrences_.isEmpty()) {
        if (occurrencesBuilder_.isEmpty()) {
          // Builder empty: drop it and adopt the plain list, recreating the
          // nested builder only if this generated code always uses builders.
          occurrencesBuilder_.dispose();
          occurrencesBuilder_ = null;
          occurrences_ = other.occurrences_;
          bitField0_ = (bitField0_ & ~0x00000002);
          occurrencesBuilder_ =
              com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                  ? getOccurrencesFieldBuilder()
                  : null;
        } else {
          occurrencesBuilder_.addAllMessages(other.occurrences_);
        }
      }
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }

  @java.lang.Override
  public final boolean isInitialized() {
    return true; // no required fields in this message
  }

  // Stream merge: reads tag-by-tag until EOF (tag 0) or an end-group tag.
  // Field 1 (tag 10) = parent string; field 2 (tag 18) = one Occurrence.
  // Unknown fields are preserved via parseUnknownField.
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              parent_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000001;
              break;
            } // case 10
          case 18:
            {
              io.grafeas.v1beta1.Occurrence m =
                  input.readMessage(io.grafeas.v1beta1.Occurrence.parser(), extensionRegistry);
              if (occurrencesBuilder_ == null) {
                ensureOccurrencesIsMutable();
                occurrences_.add(m);
              } else {
                occurrencesBuilder_.addMessage(m);
              }
              break;
            } // case 18
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged(); // notify parent even on failure: partial state may have changed
    } // finally
    return this;
  }
  // Has-bits for this builder: bit 0x00000001 = parent, 0x00000002 =
  // occurrences-list-is-mutable (see ensureOccurrencesIsMutable).
  private int bitField0_;

  // Field 1 (`string parent`): lazy String/ByteString dual representation.
  private java.lang.Object parent_ = "";

  /**
   * The name of the project in the form of `projects/[PROJECT_ID]`, under which
   * the occurrences are to be created.
   *
   * <code>string parent = 1;</code>
   *
   * @return The parent.
   */
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s; // cache the decoded form
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }

  /**
   * The name of the project in the form of `projects/[PROJECT_ID]`, under which
   * the occurrences are to be created.
   *
   * <code>string parent = 1;</code>
   *
   * @return The bytes for parent.
   */
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b; // cache the encoded form
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  /**
   * Sets `parent` and marks bit 0x00000001 as set.
   *
   * <code>string parent = 1;</code>
   *
   * @param value The parent to set.
   * @return This builder for chaining.
   */
  public Builder setParent(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    parent_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }

  /**
   * Resets `parent` to its default (empty string) and clears its has-bit.
   *
   * <code>string parent = 1;</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearParent() {
    parent_ = getDefaultInstance().getParent();
    bitField0_ = (bitField0_ & ~0x00000001);
    onChanged();
    return this;
  }

  /**
   * Sets `parent` from raw bytes; validates UTF-8 first (proto3 requirement).
   *
   * <code>string parent = 1;</code>
   *
   * @param value The bytes for parent to set.
   * @return This builder for chaining.
   */
  public Builder setParentBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    parent_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }
  // Field 2 (`repeated Occurrence occurrences`). Two representations:
  // while occurrencesBuilder_ is null the plain list below is authoritative
  // (copy-on-write, guarded by bit 0x00000002); once a nested-builder view
  // is requested, occurrencesBuilder_ owns the data and occurrences_ is
  // ignored. Every accessor branches on which representation is active.
  private java.util.List<io.grafeas.v1beta1.Occurrence> occurrences_ =
      java.util.Collections.emptyList();

  // Copy-on-write: replace a borrowed/immutable list with a private
  // ArrayList before the first in-place mutation.
  private void ensureOccurrencesIsMutable() {
    if (!((bitField0_ & 0x00000002) != 0)) {
      occurrences_ = new java.util.ArrayList<io.grafeas.v1beta1.Occurrence>(occurrences_);
      bitField0_ |= 0x00000002;
    }
  }

  private com.google.protobuf.RepeatedFieldBuilderV3<
          io.grafeas.v1beta1.Occurrence,
          io.grafeas.v1beta1.Occurrence.Builder,
          io.grafeas.v1beta1.OccurrenceOrBuilder>
      occurrencesBuilder_;

  /**
   * The occurrences to create.
   *
   * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 2;</code>
   */
  public java.util.List<io.grafeas.v1beta1.Occurrence> getOccurrencesList() {
    if (occurrencesBuilder_ == null) {
      return java.util.Collections.unmodifiableList(occurrences_);
    } else {
      return occurrencesBuilder_.getMessageList();
    }
  }

  /**
   * Number of occurrences in field 2.
   *
   * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 2;</code>
   */
  public int getOccurrencesCount() {
    if (occurrencesBuilder_ == null) {
      return occurrences_.size();
    } else {
      return occurrencesBuilder_.getCount();
    }
  }

  /**
   * The occurrence at {@code index}.
   *
   * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 2;</code>
   */
  public io.grafeas.v1beta1.Occurrence getOccurrences(int index) {
    if (occurrencesBuilder_ == null) {
      return occurrences_.get(index);
    } else {
      return occurrencesBuilder_.getMessage(index);
    }
  }

  /**
   * Replaces the occurrence at {@code index}.
   *
   * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 2;</code>
   */
  public Builder setOccurrences(int index, io.grafeas.v1beta1.Occurrence value) {
    if (occurrencesBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureOccurrencesIsMutable();
      occurrences_.set(index, value);
      onChanged();
    } else {
      occurrencesBuilder_.setMessage(index, value);
    }
    return this;
  }

  /**
   * Replaces the occurrence at {@code index} with a built message.
   *
   * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 2;</code>
   */
  public Builder setOccurrences(
      int index, io.grafeas.v1beta1.Occurrence.Builder builderForValue) {
    if (occurrencesBuilder_ == null) {
      ensureOccurrencesIsMutable();
      occurrences_.set(index, builderForValue.build());
      onChanged();
    } else {
      occurrencesBuilder_.setMessage(index, builderForValue.build());
    }
    return this;
  }

  /**
   * Appends one occurrence.
   *
   * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 2;</code>
   */
  public Builder addOccurrences(io.grafeas.v1beta1.Occurrence value) {
    if (occurrencesBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureOccurrencesIsMutable();
      occurrences_.add(value);
      onChanged();
    } else {
      occurrencesBuilder_.addMessage(value);
    }
    return this;
  }

  /**
   * Inserts one occurrence at {@code index}.
   *
   * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 2;</code>
   */
  public Builder addOccurrences(int index, io.grafeas.v1beta1.Occurrence value) {
    if (occurrencesBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureOccurrencesIsMutable();
      occurrences_.add(index, value);
      onChanged();
    } else {
      occurrencesBuilder_.addMessage(index, value);
    }
    return this;
  }

  /**
   * Appends one occurrence from a builder (built immediately).
   *
   * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 2;</code>
   */
  public Builder addOccurrences(io.grafeas.v1beta1.Occurrence.Builder builderForValue) {
    if (occurrencesBuilder_ == null) {
      ensureOccurrencesIsMutable();
      occurrences_.add(builderForValue.build());
      onChanged();
    } else {
      occurrencesBuilder_.addMessage(builderForValue.build());
    }
    return this;
  }

  /**
   * Inserts one occurrence at {@code index} from a builder (built immediately).
   *
   * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 2;</code>
   */
  public Builder addOccurrences(
      int index, io.grafeas.v1beta1.Occurrence.Builder builderForValue) {
    if (occurrencesBuilder_ == null) {
      ensureOccurrencesIsMutable();
      occurrences_.add(index, builderForValue.build());
      onChanged();
    } else {
      occurrencesBuilder_.addMessage(index, builderForValue.build());
    }
    return this;
  }

  /**
   * Appends all of {@code values}.
   *
   * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 2;</code>
   */
  public Builder addAllOccurrences(
      java.lang.Iterable<? extends io.grafeas.v1beta1.Occurrence> values) {
    if (occurrencesBuilder_ == null) {
      ensureOccurrencesIsMutable();
      com.google.protobuf.AbstractMessageLite.Builder.addAll(values, occurrences_);
      onChanged();
    } else {
      occurrencesBuilder_.addAllMessages(values);
    }
    return this;
  }

  /**
   * Removes all occurrences.
   *
   * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 2;</code>
   */
  public Builder clearOccurrences() {
    if (occurrencesBuilder_ == null) {
      occurrences_ = java.util.Collections.emptyList();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
    } else {
      occurrencesBuilder_.clear();
    }
    return this;
  }

  /**
   * Removes the occurrence at {@code index}.
   *
   * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 2;</code>
   */
  public Builder removeOccurrences(int index) {
    if (occurrencesBuilder_ == null) {
      ensureOccurrencesIsMutable();
      occurrences_.remove(index);
      onChanged();
    } else {
      occurrencesBuilder_.remove(index);
    }
    return this;
  }

  /**
   * Returns a mutable builder view of the occurrence at {@code index}.
   * Forces the field into the RepeatedFieldBuilderV3 representation.
   *
   * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 2;</code>
   */
  public io.grafeas.v1beta1.Occurrence.Builder getOccurrencesBuilder(int index) {
    return getOccurrencesFieldBuilder().getBuilder(index);
  }

  /**
   * Read-only OrBuilder view of the occurrence at {@code index}.
   *
   * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 2;</code>
   */
  public io.grafeas.v1beta1.OccurrenceOrBuilder getOccurrencesOrBuilder(int index) {
    if (occurrencesBuilder_ == null) {
      return occurrences_.get(index);
    } else {
      return occurrencesBuilder_.getMessageOrBuilder(index);
    }
  }

  /**
   * Read-only OrBuilder view of the whole list.
   *
   * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 2;</code>
   */
  public java.util.List<? extends io.grafeas.v1beta1.OccurrenceOrBuilder>
      getOccurrencesOrBuilderList() {
    if (occurrencesBuilder_ != null) {
      return occurrencesBuilder_.getMessageOrBuilderList();
    } else {
      return java.util.Collections.unmodifiableList(occurrences_);
    }
  }

  /**
   * Appends a default-valued occurrence and returns its builder.
   *
   * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 2;</code>
   */
  public io.grafeas.v1beta1.Occurrence.Builder addOccurrencesBuilder() {
    return getOccurrencesFieldBuilder()
        .addBuilder(io.grafeas.v1beta1.Occurrence.getDefaultInstance());
  }

  /**
   * Inserts a default-valued occurrence at {@code index} and returns its builder.
   *
   * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 2;</code>
   */
  public io.grafeas.v1beta1.Occurrence.Builder addOccurrencesBuilder(int index) {
    return getOccurrencesFieldBuilder()
        .addBuilder(index, io.grafeas.v1beta1.Occurrence.getDefaultInstance());
  }

  /**
   * Builder views of all occurrences.
   *
   * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 2;</code>
   */
  public java.util.List<io.grafeas.v1beta1.Occurrence.Builder> getOccurrencesBuilderList() {
    return getOccurrencesFieldBuilder().getBuilderList();
  }

  // Lazily switches to the RepeatedFieldBuilderV3 representation; the
  // current plain list (and its mutability bit) seeds the new builder, and
  // occurrences_ is nulled to make the handoff explicit.
  private com.google.protobuf.RepeatedFieldBuilderV3<
          io.grafeas.v1beta1.Occurrence,
          io.grafeas.v1beta1.Occurrence.Builder,
          io.grafeas.v1beta1.OccurrenceOrBuilder>
      getOccurrencesFieldBuilder() {
    if (occurrencesBuilder_ == null) {
      occurrencesBuilder_ =
          new com.google.protobuf.RepeatedFieldBuilderV3<
              io.grafeas.v1beta1.Occurrence,
              io.grafeas.v1beta1.Occurrence.Builder,
              io.grafeas.v1beta1.OccurrenceOrBuilder>(
              occurrences_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean());
      occurrences_ = null;
    }
    return occurrencesBuilder_;
  }
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:grafeas.v1beta1.BatchCreateOccurrencesRequest)
}
// @@protoc_insertion_point(class_scope:grafeas.v1beta1.BatchCreateOccurrencesRequest)
private static final io.grafeas.v1beta1.BatchCreateOccurrencesRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new io.grafeas.v1beta1.BatchCreateOccurrencesRequest();
}
public static io.grafeas.v1beta1.BatchCreateOccurrencesRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<BatchCreateOccurrencesRequest> PARSER =
new com.google.protobuf.AbstractParser<BatchCreateOccurrencesRequest>() {
@java.lang.Override
public BatchCreateOccurrencesRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<BatchCreateOccurrencesRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<BatchCreateOccurrencesRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public io.grafeas.v1beta1.BatchCreateOccurrencesRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 35,045 | java-datastream/proto-google-cloud-datastream-v1/src/main/java/com/google/cloud/datastream/v1/DeletePrivateConnectionRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/datastream/v1/datastream.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.datastream.v1;
/**
*
*
* <pre>
* Request to delete a private connection.
* </pre>
*
* Protobuf type {@code google.cloud.datastream.v1.DeletePrivateConnectionRequest}
*/
public final class DeletePrivateConnectionRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.datastream.v1.DeletePrivateConnectionRequest)
DeletePrivateConnectionRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use DeletePrivateConnectionRequest.newBuilder() to construct.
private DeletePrivateConnectionRequest(
com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private DeletePrivateConnectionRequest() {
name_ = "";
requestId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new DeletePrivateConnectionRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datastream.v1.DatastreamProto
.internal_static_google_cloud_datastream_v1_DeletePrivateConnectionRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datastream.v1.DatastreamProto
.internal_static_google_cloud_datastream_v1_DeletePrivateConnectionRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datastream.v1.DeletePrivateConnectionRequest.class,
com.google.cloud.datastream.v1.DeletePrivateConnectionRequest.Builder.class);
}
public static final int NAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object name_ = "";
/**
*
*
* <pre>
* Required. The name of the private connectivity configuration to delete.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The name.
*/
@java.lang.Override
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The name of the private connectivity configuration to delete.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for name.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int REQUEST_ID_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object requestId_ = "";
/**
*
*
* <pre>
* Optional. A request ID to identify requests. Specify a unique request ID
* so that if you must retry your request, the server will know to ignore
* the request if it has already been completed. The server will guarantee
* that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and the
* request times out. If you make the request again with the same request ID,
* the server can check if original operation with the same request ID was
* received, and if so, will ignore the second request. This prevents clients
* from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The requestId.
*/
@java.lang.Override
public java.lang.String getRequestId() {
java.lang.Object ref = requestId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
requestId_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. A request ID to identify requests. Specify a unique request ID
* so that if you must retry your request, the server will know to ignore
* the request if it has already been completed. The server will guarantee
* that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and the
* request times out. If you make the request again with the same request ID,
* the server can check if original operation with the same request ID was
* received, and if so, will ignore the second request. This prevents clients
* from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for requestId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getRequestIdBytes() {
java.lang.Object ref = requestId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
requestId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int FORCE_FIELD_NUMBER = 3;
private boolean force_ = false;
/**
*
*
* <pre>
* Optional. If set to true, any child routes that belong to this
* PrivateConnection will also be deleted.
* </pre>
*
* <code>bool force = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The force.
*/
@java.lang.Override
public boolean getForce() {
return force_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, requestId_);
}
if (force_ != false) {
output.writeBool(3, force_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, requestId_);
}
if (force_ != false) {
size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, force_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.datastream.v1.DeletePrivateConnectionRequest)) {
return super.equals(obj);
}
com.google.cloud.datastream.v1.DeletePrivateConnectionRequest other =
(com.google.cloud.datastream.v1.DeletePrivateConnectionRequest) obj;
if (!getName().equals(other.getName())) return false;
if (!getRequestId().equals(other.getRequestId())) return false;
if (getForce() != other.getForce()) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + NAME_FIELD_NUMBER;
hash = (53 * hash) + getName().hashCode();
hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER;
hash = (53 * hash) + getRequestId().hashCode();
hash = (37 * hash) + FORCE_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getForce());
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.datastream.v1.DeletePrivateConnectionRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datastream.v1.DeletePrivateConnectionRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datastream.v1.DeletePrivateConnectionRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datastream.v1.DeletePrivateConnectionRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datastream.v1.DeletePrivateConnectionRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datastream.v1.DeletePrivateConnectionRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datastream.v1.DeletePrivateConnectionRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datastream.v1.DeletePrivateConnectionRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datastream.v1.DeletePrivateConnectionRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.datastream.v1.DeletePrivateConnectionRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datastream.v1.DeletePrivateConnectionRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datastream.v1.DeletePrivateConnectionRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.datastream.v1.DeletePrivateConnectionRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request to delete a private connection.
* </pre>
*
* Protobuf type {@code google.cloud.datastream.v1.DeletePrivateConnectionRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.datastream.v1.DeletePrivateConnectionRequest)
com.google.cloud.datastream.v1.DeletePrivateConnectionRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datastream.v1.DatastreamProto
.internal_static_google_cloud_datastream_v1_DeletePrivateConnectionRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datastream.v1.DatastreamProto
.internal_static_google_cloud_datastream_v1_DeletePrivateConnectionRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datastream.v1.DeletePrivateConnectionRequest.class,
com.google.cloud.datastream.v1.DeletePrivateConnectionRequest.Builder.class);
}
// Construct using com.google.cloud.datastream.v1.DeletePrivateConnectionRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
name_ = "";
requestId_ = "";
force_ = false;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.datastream.v1.DatastreamProto
.internal_static_google_cloud_datastream_v1_DeletePrivateConnectionRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.datastream.v1.DeletePrivateConnectionRequest
getDefaultInstanceForType() {
return com.google.cloud.datastream.v1.DeletePrivateConnectionRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.datastream.v1.DeletePrivateConnectionRequest build() {
com.google.cloud.datastream.v1.DeletePrivateConnectionRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.datastream.v1.DeletePrivateConnectionRequest buildPartial() {
com.google.cloud.datastream.v1.DeletePrivateConnectionRequest result =
new com.google.cloud.datastream.v1.DeletePrivateConnectionRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.datastream.v1.DeletePrivateConnectionRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.name_ = name_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.requestId_ = requestId_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.force_ = force_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.datastream.v1.DeletePrivateConnectionRequest) {
return mergeFrom((com.google.cloud.datastream.v1.DeletePrivateConnectionRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.datastream.v1.DeletePrivateConnectionRequest other) {
if (other
== com.google.cloud.datastream.v1.DeletePrivateConnectionRequest.getDefaultInstance())
return this;
if (!other.getName().isEmpty()) {
name_ = other.name_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getRequestId().isEmpty()) {
requestId_ = other.requestId_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.getForce() != false) {
setForce(other.getForce());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
name_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
requestId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 24:
{
force_ = input.readBool();
bitField0_ |= 0x00000004;
break;
} // case 24
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object name_ = "";
/**
*
*
* <pre>
* Required. The name of the private connectivity configuration to delete.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The name.
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The name of the private connectivity configuration to delete.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for name.
*/
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The name of the private connectivity configuration to delete.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The name to set.
* @return This builder for chaining.
*/
public Builder setName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The name of the private connectivity configuration to delete.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearName() {
name_ = getDefaultInstance().getName();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The name of the private connectivity configuration to delete.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for name to set.
* @return This builder for chaining.
*/
public Builder setNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object requestId_ = "";
/**
*
*
* <pre>
* Optional. A request ID to identify requests. Specify a unique request ID
* so that if you must retry your request, the server will know to ignore
* the request if it has already been completed. The server will guarantee
* that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and the
* request times out. If you make the request again with the same request ID,
* the server can check if original operation with the same request ID was
* received, and if so, will ignore the second request. This prevents clients
* from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The requestId.
*/
public java.lang.String getRequestId() {
java.lang.Object ref = requestId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
requestId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. A request ID to identify requests. Specify a unique request ID
* so that if you must retry your request, the server will know to ignore
* the request if it has already been completed. The server will guarantee
* that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and the
* request times out. If you make the request again with the same request ID,
* the server can check if original operation with the same request ID was
* received, and if so, will ignore the second request. This prevents clients
* from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for requestId.
*/
public com.google.protobuf.ByteString getRequestIdBytes() {
java.lang.Object ref = requestId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
requestId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. A request ID to identify requests. Specify a unique request ID
* so that if you must retry your request, the server will know to ignore
* the request if it has already been completed. The server will guarantee
* that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and the
* request times out. If you make the request again with the same request ID,
* the server can check if original operation with the same request ID was
* received, and if so, will ignore the second request. This prevents clients
* from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The requestId to set.
* @return This builder for chaining.
*/
public Builder setRequestId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
requestId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A request ID to identify requests. Specify a unique request ID
* so that if you must retry your request, the server will know to ignore
* the request if it has already been completed. The server will guarantee
* that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and the
* request times out. If you make the request again with the same request ID,
* the server can check if original operation with the same request ID was
* received, and if so, will ignore the second request. This prevents clients
* from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearRequestId() {
requestId_ = getDefaultInstance().getRequestId();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A request ID to identify requests. Specify a unique request ID
* so that if you must retry your request, the server will know to ignore
* the request if it has already been completed. The server will guarantee
* that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and the
* request times out. If you make the request again with the same request ID,
* the server can check if original operation with the same request ID was
* received, and if so, will ignore the second request. This prevents clients
* from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for requestId to set.
* @return This builder for chaining.
*/
public Builder setRequestIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
requestId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private boolean force_;
/**
*
*
* <pre>
* Optional. If set to true, any child routes that belong to this
* PrivateConnection will also be deleted.
* </pre>
*
* <code>bool force = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The force.
*/
@java.lang.Override
public boolean getForce() {
return force_;
}
/**
*
*
* <pre>
* Optional. If set to true, any child routes that belong to this
* PrivateConnection will also be deleted.
* </pre>
*
* <code>bool force = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The force to set.
* @return This builder for chaining.
*/
public Builder setForce(boolean value) {
force_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. If set to true, any child routes that belong to this
* PrivateConnection will also be deleted.
* </pre>
*
* <code>bool force = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearForce() {
bitField0_ = (bitField0_ & ~0x00000004);
force_ = false;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.datastream.v1.DeletePrivateConnectionRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.datastream.v1.DeletePrivateConnectionRequest)
  // NOTE: protoc-generated singleton/parser boilerplate; comments only added.
  // Shared immutable default instance returned by getDefaultInstance().
  private static final com.google.cloud.datastream.v1.DeletePrivateConnectionRequest
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.datastream.v1.DeletePrivateConnectionRequest();
  }
  public static com.google.cloud.datastream.v1.DeletePrivateConnectionRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire parser; on failure the partially parsed message is attached to the exception.
  private static final com.google.protobuf.Parser<DeletePrivateConnectionRequest> PARSER =
      new com.google.protobuf.AbstractParser<DeletePrivateConnectionRequest>() {
        @java.lang.Override
        public DeletePrivateConnectionRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<DeletePrivateConnectionRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<DeletePrivateConnectionRequest> getParserForType() {
    return PARSER;
  }
}
|
apache/jackrabbit | 35,304 | jackrabbit-spi-commons/src/main/java/org/apache/jackrabbit/spi/commons/batch/ConsolidatingChangeLog.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.spi.commons.batch;
import java.util.Iterator;
import java.util.ListIterator;
import javax.jcr.RepositoryException;
import org.apache.jackrabbit.spi.Batch;
import org.apache.jackrabbit.spi.ItemId;
import org.apache.jackrabbit.spi.Name;
import org.apache.jackrabbit.spi.NodeId;
import org.apache.jackrabbit.spi.Path;
import org.apache.jackrabbit.spi.PathFactory;
import org.apache.jackrabbit.spi.PropertyId;
import org.apache.jackrabbit.spi.QValue;
import org.apache.jackrabbit.spi.Tree;
import org.apache.jackrabbit.spi.commons.name.PathFactoryImpl;
/**
* A {@link ChangeLog} implementation which does basic consolidation on its
* {@link org.apache.jackrabbit.spi.commons.batch.Operation Operation}s. That is, cancelling
* operations are removed if possible. In general this is not possible across
* {@link org.apache.jackrabbit.spi.commons.batch.Operations.Move move} operations. The individual
* {@link CancelableOperation CancelableOperation} implementations document their behavior
* concerning cancellation.
*/
public class ConsolidatingChangeLog extends AbstractChangeLog<ConsolidatingChangeLog.CancelableOperation> {
    // Shared factory used by getPath(NodeId, Name) to build absolute child paths.
    private static final PathFactory PATH_FACTORY = PathFactoryImpl.getInstance();
    /**
     * Create a new instance of a consolidating change log.
     */
    public ConsolidatingChangeLog() {
        super();
    }
/**
* Create a {@link Path} from the {@link NodeId} of a parent and the {@link Name} of a
* child.
* @param parentId node id of the parent
* @param name name of the child
* @return the path of the item <code>name</code> or <code>null</code> if <code>parentId</code>'s
* path is not absolute
* @throws RepositoryException
*/
protected static Path getPath(NodeId parentId, Name name) throws RepositoryException {
Path parent = parentId.getPath();
if (!parent.isAbsolute()) {
return null;
}
return PATH_FACTORY.create(parent, name, true);
}
/**
* Determine the {@link Path} from an {@link ItemId}.
* @param itemId
* @return path of the item <code>itemId</code> or <code>null</code> if <code>itemId</code>'s
* path is not absolute
*/
protected static Path getPath(ItemId itemId) {
Path path = itemId.getPath();
if (path != null && !path.isAbsolute()) {
return null;
}
return path;
}
    // -----------------------------------------------------< ChangeLog >---
    // Each ChangeLog mutator below wraps its arguments in the corresponding
    // CancelableOperation and hands it to addOperation(), which performs the
    // actual consolidation against previously recorded operations.
    public void addNode(NodeId parentId, Name nodeName, Name nodetypeName, String uuid)
            throws RepositoryException {
        addOperation(CancelableOperations.addNode(parentId, nodeName, nodetypeName, uuid));
    }
    public void addProperty(NodeId parentId, Name propertyName, QValue value) throws RepositoryException {
        addOperation(CancelableOperations.addProperty(parentId, propertyName, value));
    }
    public void addProperty(NodeId parentId, Name propertyName, QValue[] values) throws RepositoryException {
        addOperation(CancelableOperations.addProperty(parentId, propertyName, values));
    }
    public void move(NodeId srcNodeId, NodeId destParentNodeId, Name destName) throws RepositoryException {
        addOperation(CancelableOperations.move(srcNodeId, destParentNodeId, destName));
    }
    public void remove(ItemId itemId) throws RepositoryException {
        addOperation(CancelableOperations.remove(itemId));
    }
    public void reorderNodes(NodeId parentId, NodeId srcNodeId, NodeId beforeNodeId) throws RepositoryException {
        addOperation(CancelableOperations.reorderNodes(parentId, srcNodeId, beforeNodeId));
    }
    public void setMixins(NodeId nodeId, Name[] mixinNodeTypeNames) throws RepositoryException {
        addOperation(CancelableOperations.setMixins(nodeId, mixinNodeTypeNames));
    }
    public void setPrimaryType(NodeId nodeId, Name primaryNodeTypeName) throws RepositoryException {
        addOperation(CancelableOperations.setPrimaryType(nodeId, primaryNodeTypeName));
    }
    public void setValue(PropertyId propertyId, QValue value) throws RepositoryException {
        addOperation(CancelableOperations.setValue(propertyId, value));
    }
    public void setValue(PropertyId propertyId, QValue[] values) throws RepositoryException {
        addOperation(CancelableOperations.setValue(propertyId, values));
    }
    @Override
    public void setTree(NodeId parentId, Tree contentTree) throws RepositoryException {
        addOperation(CancelableOperations.setTree(parentId, contentTree));
    }
    /**
     * Determines the cancellation behavior from the list of {@link ChangeLogImpl#operations operations}
     * and the current operation <code>op</code>:
     * <ul>
     * <li>When the current operation is cancelled by the last operation, the list of operations
     * is not modified.</li>
     * <li>When the current operation and the last operation cancel each other, the last operation is
     * removed from the list of operations.</li>
     * <li>When the last operation is cancelled by this operation, the last operation is removed from
     * the list of operations and determination of cancellation starts from scratch.</li>
     * <li>Otherwise add the current operation to the list of operations.</li>
     * </ul>
     * Note: the backward scan stops at the first operation that reports
     * {@link CancelableOperation#CANCEL_NONE}; the sentinel returned by
     * {@link OperationsBackwardWithSentinel} guarantees this happens at the latest
     * when the list is exhausted, so <code>op</code> is then appended.
     */
    @Override
    public void addOperation(CancelableOperation op) throws RepositoryException {
        CancelableOperation otherOp = op;
        for (OperationsBackwardWithSentinel it = new OperationsBackwardWithSentinel(); it.hasNext(); ) {
            CancelableOperation thisOp = it.next();
            switch (thisOp.cancel(otherOp)) {
                case CancelableOperation.CANCEL_THIS:
                    // The recorded operation is superseded: drop it and keep scanning backwards.
                    it.remove();
                    continue;
                case CancelableOperation.CANCEL_OTHER:
                    // The new operation is redundant: discard it, list unchanged.
                    return;
                case CancelableOperation.CANCEL_BOTH:
                    // Mutual cancellation: drop the recorded op and discard the new one.
                    it.remove();
                    return;
                case CancelableOperation.CANCEL_NONE:
                    // No interaction: append the new operation and stop scanning.
                    super.addOperation(otherOp);
                    return;
                default:
                    assert false : "Invalid case in switch";
            }
        }
    }
    // -----------------------------------------------------< private >---
    /**
     * Iterates over the recorded {@link #operations} in reverse order and, after the
     * real operations are exhausted, yields one trailing {@link CancelableOperations.Empty}
     * sentinel. Since the sentinel's <code>cancel</code> always returns
     * <code>CANCEL_NONE</code>, {@link #addOperation} is guaranteed to terminate by
     * appending the new operation. The sentinel itself cannot be removed.
     */
    private class OperationsBackwardWithSentinel implements Iterator<CancelableOperation> {
        private final ListIterator<CancelableOperation> it = operations.listIterator(operations.size());
        private boolean last = !it.hasPrevious();  // true when the next element is the sentinel
        private boolean done;                      // true once the sentinel has been handed out
        public boolean hasNext() {
            return it.hasPrevious() || last;
        }
        public CancelableOperation next() {
            if (last) {
                done = true;
                return CancelableOperations.empty();
            }
            else {
                CancelableOperation o = it.previous();
                last = !it.hasPrevious();
                return o;
            }
        }
        public void remove() {
            if (done) {
                // The sentinel is not part of the backing list.
                throw new IllegalStateException("Cannot remove last element");
            }
            else {
                it.remove();
            }
        }
    }
// -----------------------------------------------------< CancelableOperations >---
    /**
     * This class represent an {@link Operation} which can be cancelled by another operation
     * or which cancels another operation.
     */
    protected interface CancelableOperation extends Operation {
        /**
         * The other operation cancels this operations
         */
        public static final int CANCEL_THIS = 0;
        /**
         * This operation cancels the other operation
         */
        public static final int CANCEL_OTHER = 1;
        /**
         * This operation and the other operation cancel each other mutually
         */
        public static final int CANCEL_BOTH = 2;
        /**
         * No cancellation
         */
        public static final int CANCEL_NONE = 3;
        /**
         * Determines the cancellation behavior of the <code>other</code> operation
         * on this operation.
         * @param other
         * @return Either {@link #CANCEL_THIS}, {@link #CANCEL_OTHER}, {@link #CANCEL_BOTH}
         *   or {@link #CANCEL_NONE}
         * @throws RepositoryException
         */
        public int cancel(CancelableOperation other) throws RepositoryException;
    }
/**
* Factory for creating {@link ConsolidatingChangeLog.CancelableOperation CancelableOperation}s.
* The inner classes of this class all implement the <code>CancelableOperation</code> interface.
*
* @see Operation
*/
protected static final class CancelableOperations {
        // Non-instantiable: this is a pure factory/holder class.
        private CancelableOperations() {
            super();
        }
        // -----------------------------------------------------< Empty >---
        /**
         * An <code>Empty</code> operation never cancels another operation and is never
         * cancelled by any other operation. It serves as the sentinel in
         * {@link ConsolidatingChangeLog#addOperation}.
         */
        public static class Empty extends Operations.Empty implements CancelableOperation {
            /**
             * @return {@link ConsolidatingChangeLog.CancelableOperation#CANCEL_NONE}
             */
            public int cancel(CancelableOperation other) throws RepositoryException {
                return CANCEL_NONE;
            }
        }
        /**
         * Factory method for creating an {@link Empty Empty} operation.
         * @return a fresh <code>Empty</code> operation
         */
        public static CancelableOperation empty() {
            return new Empty();
        }
        // -----------------------------------------------------< AddNode >---
        /**
         * An <code>AddNode</code> operation is cancelled by a
         * {@link ConsolidatingChangeLog.CancelableOperations.Remove Remove} operation higher up the tree.
         * The remove operation is also cancelled if it is targeted at the same node than this add
         * operation.
         */
        public static class AddNode extends Operations.AddNode implements CancelableOperation {
            public AddNode(NodeId parentId, Name nodeName, Name nodetypeName, String uuid) {
                super(parentId, nodeName, nodetypeName, uuid);
            }
            /**
             * @return
             * <ul>
             * <li>{@link ConsolidatingChangeLog.CancelableOperation#CANCEL_BOTH CANCEL_BOTH} if
             *   <code>other</code> is an instance of
             *   {@link ConsolidatingChangeLog.CancelableOperations.Remove Remove} and has this node
             *   as target.</li>
             * <li>{@link ConsolidatingChangeLog.CancelableOperation#CANCEL_THIS CANCEL_THIS} if
             *   <code>other</code> is an instance of
             *   {@link ConsolidatingChangeLog.CancelableOperations.Remove Remove} and has an node higher up
             *   the hierarchy as target.</li>
             * <li>{@link ConsolidatingChangeLog.CancelableOperation#CANCEL_NONE CANCEL_NONE} otherwise.</li>
             * </ul>
             */
            public int cancel(CancelableOperation other) throws RepositoryException {
                if (other instanceof Remove) {
                    Path thisPath = ConsolidatingChangeLog.getPath(parentId, nodeName);
                    Path otherPath = ConsolidatingChangeLog.getPath(((Remove) other).itemId);
                    if (thisPath == null || otherPath == null) {
                        // Relative paths cannot be compared reliably: be conservative.
                        return CANCEL_NONE;
                    }
                    if (thisPath.equals(otherPath)) {
                        return CANCEL_BOTH;
                    }
                    return (thisPath.isDescendantOf(otherPath))
                        ? CANCEL_THIS
                        : CANCEL_NONE;
                }
                return CANCEL_NONE;
            }
        }
        /**
         * Factory method for creating an {@link AddNode AddNode} operation.
         * @see Batch#addNode(NodeId, Name, Name, String)
         *
         * @param parentId
         * @param nodeName
         * @param nodetypeName
         * @param uuid
         * @return a new <code>AddNode</code> operation
         */
        public static CancelableOperation addNode(NodeId parentId, Name nodeName, Name nodetypeName, String uuid) {
            return new AddNode(parentId, nodeName, nodetypeName, uuid);
        }
        // ---------------------------------------------------< AddProperty >---
        /**
         * <code>AddProperty</code> operations might cancel with
         * {@link ConsolidatingChangeLog.CancelableOperations.Remove Remove} and
         * {@link ConsolidatingChangeLog.CancelableOperations.SetValue SetValue} operations.
         */
        public static class AddProperty extends Operations.AddProperty implements CancelableOperation {
            public AddProperty(NodeId parentId, Name propertyName, QValue value) {
                super(parentId, propertyName, value);
            }
            public AddProperty(NodeId parentId, Name propertyName, QValue[] values) {
                super(parentId, propertyName, values);
            }
            /**
             * @return
             * <ul>
             * <li>{@link ConsolidatingChangeLog.CancelableOperation#CANCEL_BOTH CANCEL_BOTH} if
             *   <code>other</code> is an instance of
             *   {@link ConsolidatingChangeLog.CancelableOperations.Remove Remove} and has this property as
             *   target or if <code>other</code> is an instance of
             *   {@link ConsolidatingChangeLog.CancelableOperations.SetValue SetValue} for a value of
             *   <code>null</code> and has this property as target.</li>
             * <li>{@link ConsolidatingChangeLog.CancelableOperation#CANCEL_THIS CANCEL_THIS} if
             *   <code>other</code> is an instance of
             *   {@link ConsolidatingChangeLog.CancelableOperations.Remove Remove} and has a node higher up
             *   the hierarchy as target.</li>
             * <li>{@link ConsolidatingChangeLog.CancelableOperation#CANCEL_OTHER CANCEL_OTHER} if
             *   <code>other</code> is an instance of
             *   {@link ConsolidatingChangeLog.CancelableOperations.SetValue SetValue} and has this
             *   property as target.</li>
             * <li>{@link ConsolidatingChangeLog.CancelableOperation#CANCEL_NONE CANCEL_NONE} otherwise.</li>
             * </ul>
             */
            public int cancel(CancelableOperation other) throws RepositoryException {
                if (other instanceof Remove) {
                    Path thisPath = ConsolidatingChangeLog.getPath(parentId, propertyName);
                    Path otherPath = ConsolidatingChangeLog.getPath(((Remove) other).itemId);
                    if (thisPath == null || otherPath == null) {
                        return CANCEL_NONE;
                    }
                    if (thisPath.equals(otherPath)) {
                        return CANCEL_BOTH;
                    }
                    return (thisPath.isDescendantOf(otherPath))
                        ? CANCEL_THIS
                        : CANCEL_NONE;
                }
                if (other instanceof SetValue) {
                    SetValue setValue = (SetValue) other;
                    Path thisPath = ConsolidatingChangeLog.getPath(parentId, propertyName);
                    Path otherPath = ConsolidatingChangeLog.getPath(setValue.propertyId);
                    if (thisPath == null || otherPath == null) {
                        return CANCEL_NONE;
                    }
                    if (thisPath.equals(otherPath)) {
                        // Setting a single null value is equivalent to removing the property.
                        if (!isMultivalued && setValue.values[0] == null) {
                            return CANCEL_BOTH;
                        }
                        // A SetValue writing the identical values is redundant.
                        else if (values.length == setValue.values.length) {
                            for (int k = 0; k < values.length; k++) {
                                if (!values[k].equals(setValue.values[k])) {
                                    return CANCEL_NONE;
                                }
                            }
                            return CANCEL_OTHER;
                        }
                    }
                }
                return CANCEL_NONE;
            }
        }
        /**
         * Factory method for creating an {@link AddProperty AddProperty} operation.
         *
         * @see Batch#addProperty(NodeId, Name, QValue)
         * @param parentId
         * @param propertyName
         * @param value
         * @return a new single-valued <code>AddProperty</code> operation
         */
        public static CancelableOperation addProperty(NodeId parentId, Name propertyName, QValue value) {
            return new AddProperty(parentId, propertyName, value);
        }
        /**
         * Factory method for creating an {@link AddProperty AddProperty} operation.
         *
         * @see Batch#addProperty(NodeId, Name, QValue[])
         * @param parentId
         * @param propertyName
         * @param values
         * @return a new multi-valued <code>AddProperty</code> operation
         */
        public static CancelableOperation addProperty(NodeId parentId, Name propertyName, QValue[] values) {
            return new AddProperty(parentId, propertyName, values);
        }
        // ----------------------------------------------------------< Move >---
        /**
         * A <code>Move</code> operation never cancels another operation and is never
         * cancelled by any other operation. (Consolidation across moves is not safe
         * in general — see the class comment of {@link ConsolidatingChangeLog}.)
         */
        public static class Move extends Operations.Move implements CancelableOperation {
            public Move(NodeId srcNodeId, NodeId destParentNodeId, Name destName) {
                super(srcNodeId, destParentNodeId, destName);
            }
            /**
             * @return {@link ConsolidatingChangeLog.CancelableOperation#CANCEL_NONE CANCEL_NONE}
             */
            public int cancel(CancelableOperation other) {
                return CANCEL_NONE;
            }
        }
        /**
         * Factory method for creating a {@link Move Move} operation.
         *
         * @see Batch#move(NodeId, NodeId, Name)
         * @param srcNodeId
         * @param destParentNodeId
         * @param destName
         * @return a new <code>Move</code> operation
         */
        public static CancelableOperation move(NodeId srcNodeId, NodeId destParentNodeId, Name destName) {
            return new Move(srcNodeId, destParentNodeId, destName);
        }
        // --------------------------------------------------------< Remove >---
        /**
         * A <code>Remove</code> operation's own <code>cancel</code> never cancels
         * anything; other operations (e.g. <code>AddNode</code>, <code>AddProperty</code>,
         * <code>SetValue</code>) inspect <code>Remove</code> instances themselves to
         * decide whether they are cancelled.
         */
        public static class Remove extends Operations.Remove implements CancelableOperation {
            public Remove(ItemId itemId) {
                super(itemId);
            }
            /**
             * @return {@link ConsolidatingChangeLog.CancelableOperation#CANCEL_NONE CANCEL_NONE}
             */
            public int cancel(CancelableOperation other) {
                return CANCEL_NONE;
            }
        }
        /**
         * Factory method for creating a {@link Remove Remove} operation.
         *
         * @see Batch#remove(ItemId)
         * @param itemId
         * @return a new <code>Remove</code> operation
         */
        public static CancelableOperation remove(ItemId itemId) {
            return new Remove(itemId);
        }
        // -------------------------------------------------< Reorder Nodes >---
        /**
         * A <code>ReorderNodes</code> operation might cancel with
         * {@link ConsolidatingChangeLog.CancelableOperations.Remove Remove} and
         * {@link ConsolidatingChangeLog.CancelableOperations.ReorderNodes ReorderNodes} operations.
         */
        public static class ReorderNodes extends Operations.ReorderNodes implements CancelableOperation {
            public ReorderNodes(NodeId parentId, NodeId srcNodeId, NodeId beforeNodeId) {
                super(parentId, srcNodeId, beforeNodeId);
            }
            /**
             * @return
             * <ul>
             * <li>{@link ConsolidatingChangeLog.CancelableOperation#CANCEL_THIS CANCEL_THIS} if
             *   <code>other</code> is an instance of
             *   {@link ConsolidatingChangeLog.CancelableOperations.Remove Remove} and has an node higher up
             *   the hierarchy or this node as target. Or if <code>other</code> is an instance of
             *   {@link ConsolidatingChangeLog.CancelableOperations.ReorderNodes ReorderNodes} which
             *   has this node as target and neither <code>srcNodeId</code> nor <code>beforeNodeId</code>
             *   has same name siblings.</li>
             * <li>{@link ConsolidatingChangeLog.CancelableOperation#CANCEL_NONE CANCEL_NONE} otherwise.</li>
             * </ul>
             */
            public int cancel(CancelableOperation other) throws RepositoryException {
                if (other instanceof Remove) {
                    Path thisPath = ConsolidatingChangeLog.getPath(srcNodeId);
                    Path otherPath = ConsolidatingChangeLog.getPath(((Remove) other).itemId);
                    if (thisPath == null || otherPath == null) {
                        return CANCEL_NONE;
                    }
                    return thisPath.isDescendantOf(otherPath) || thisPath.equals(otherPath)
                        ? CANCEL_THIS
                        : CANCEL_NONE;
                }
                if (other instanceof ReorderNodes) {
                    // A later reorder of the same parent supersedes this one — but only
                    // when no same-name siblings are involved, since a reorder can change
                    // SNS indexes and thereby the meaning of the earlier operation.
                    Path thisPath = ConsolidatingChangeLog.getPath(parentId);
                    Path otherPath = ConsolidatingChangeLog.getPath(((ReorderNodes) other).parentId);
                    if (thisPath == null || otherPath == null) {
                        return CANCEL_NONE;
                    }
                    return thisPath.equals(otherPath) && !hasSNS(srcNodeId) && !hasSNS(beforeNodeId)
                        ? CANCEL_THIS
                        : CANCEL_NONE;
                }
                return CANCEL_NONE;
            }
            // True when the node's path carries an index > 1, i.e. it is a same-name sibling.
            private boolean hasSNS(NodeId nodeId) {
                if (nodeId != null) {
                    Path path = ConsolidatingChangeLog.getPath(nodeId);
                    return path != null && path.getIndex() > 1;
                }
                return false;
            }
        }
        /**
         * Factory method for creating a {@link ReorderNodes ReorderNodes} operation.
         *
         * @see Batch#reorderNodes(NodeId, NodeId, NodeId)
         * @param parentId
         * @param srcNodeId
         * @param beforeNodeId
         * @return a new <code>ReorderNodes</code> operation
         */
        public static CancelableOperation reorderNodes(NodeId parentId, NodeId srcNodeId, NodeId beforeNodeId) {
            return new ReorderNodes(parentId, srcNodeId, beforeNodeId);
        }
        // -----------------------------------------------------< SetMixins >---
        /**
         * A <code>SetMixins</code> operation might cancel with
         * {@link ConsolidatingChangeLog.CancelableOperations.Remove Remove} and
         * {@link ConsolidatingChangeLog.CancelableOperations.SetMixins SetMixins} operations.
         */
        public static class SetMixins extends Operations.SetMixins implements CancelableOperation {
            public SetMixins(NodeId nodeId, Name[] mixinNodeTypeNames) {
                super(nodeId, mixinNodeTypeNames);
            }
            /**
             * @return
             * <ul>
             * <li>{@link ConsolidatingChangeLog.CancelableOperation#CANCEL_THIS CANCEL_THIS} if
             *   <code>other</code> is an instance of
             *   {@link ConsolidatingChangeLog.CancelableOperations.Remove Remove} and has an node higher up
             *   the hierarchy or this node as target. Or if <code>other</code> is an instance of
             *   {@link ConsolidatingChangeLog.CancelableOperations.SetMixins SetMixins} which has this node
             *   as target and has the same <code>mixinNodeTypeNames</code>.</li>
             * <li>{@link ConsolidatingChangeLog.CancelableOperation#CANCEL_NONE CANCEL_NONE} otherwise.</li>
             * </ul>
             */
            public int cancel(CancelableOperation other) throws RepositoryException {
                if (other instanceof Remove) {
                    Path thisPath = ConsolidatingChangeLog.getPath(nodeId);
                    Path otherPath = ConsolidatingChangeLog.getPath(((Remove) other).itemId);
                    if (thisPath == null || otherPath == null) {
                        return CANCEL_NONE;
                    }
                    return thisPath.isDescendantOf(otherPath) || thisPath.equals(otherPath)
                        ? CANCEL_THIS
                        : CANCEL_NONE;
                }
                if (other instanceof SetMixins) {
                    // Only an identical, order-equal mixin list cancels this operation.
                    SetMixins setMixin = (SetMixins) other;
                    if (mixinNodeTypeNames.length == setMixin.mixinNodeTypeNames.length) {
                        Path thisPath = ConsolidatingChangeLog.getPath(nodeId);
                        Path otherPath = ConsolidatingChangeLog.getPath(setMixin.nodeId);
                        if (thisPath == null || otherPath == null) {
                            return CANCEL_NONE;
                        }
                        if (thisPath.equals(otherPath)) {
                            for (int k = 0; k < mixinNodeTypeNames.length; k++) {
                                if (!mixinNodeTypeNames[k].equals(setMixin.mixinNodeTypeNames[k])) {
                                    return CANCEL_NONE;
                                }
                            }
                            return CANCEL_THIS;
                        }
                    }
                }
                return CANCEL_NONE;
            }
        }
        /**
         * Factory method for creating a {@link SetMixins} operation.
         *
         * @see Batch#setMixins(NodeId, Name[])
         * @param nodeId
         * @param mixinNodeTypeNames
         * @return a new <code>SetMixins</code> operation
         */
        public static CancelableOperation setMixins(NodeId nodeId, Name[] mixinNodeTypeNames) {
            return new SetMixins(nodeId, mixinNodeTypeNames);
        }
        // ------------------------------------------------< SetPrimaryType >---
        /**
         * A <code>SetPrimaryType</code> operation might cancel with
         * {@link ConsolidatingChangeLog.CancelableOperations.Remove Remove} and
         * {@link ConsolidatingChangeLog.CancelableOperations.SetPrimaryType SetPrimaryType} operations.
         */
        public static class SetPrimaryType extends Operations.SetPrimaryType implements CancelableOperation {
            public SetPrimaryType(NodeId nodeId, Name primaryTypeName) {
                super(nodeId, primaryTypeName);
            }
            /**
             * @return
             * <ul>
             * <li>{@link ConsolidatingChangeLog.CancelableOperation#CANCEL_THIS CANCEL_THIS} if
             *   <code>other</code> is an instance of
             *   {@link ConsolidatingChangeLog.CancelableOperations.Remove Remove} and has an node higher up
             *   the hierarchy or this node as target. Or if <code>other</code> is an instance of
             *   {@link ConsolidatingChangeLog.CancelableOperations.SetPrimaryType SetPrimaryType} which has
             *   this node as target and has the same <code>primaryTypeName</code>.</li>
             * <li>{@link ConsolidatingChangeLog.CancelableOperation#CANCEL_NONE CANCEL_NONE} otherwise.</li>
             * </ul>
             */
            public int cancel(CancelableOperation other) throws RepositoryException {
                if (other instanceof Remove) {
                    Path thisPath = ConsolidatingChangeLog.getPath(nodeId);
                    Path otherPath = ConsolidatingChangeLog.getPath(((Remove) other).itemId);
                    if (thisPath == null || otherPath == null) {
                        return CANCEL_NONE;
                    }
                    return thisPath.isDescendantOf(otherPath) || thisPath.equals(otherPath)
                        ? CANCEL_THIS
                        : CANCEL_NONE;
                }
                if (other instanceof SetPrimaryType) {
                    // Only an identical later SetPrimaryType on the same node cancels this one.
                    SetPrimaryType setPrimaryType = (SetPrimaryType) other;
                    if (primaryTypeName.equals(setPrimaryType.primaryTypeName)) {
                        Path thisPath = ConsolidatingChangeLog.getPath(nodeId);
                        Path otherPath = ConsolidatingChangeLog.getPath(setPrimaryType.nodeId);
                        if (thisPath == null || otherPath == null) {
                            return CANCEL_NONE;
                        }
                        if (thisPath.equals(otherPath)) {
                            return CANCEL_THIS;
                        }
                    }
                }
                return CANCEL_NONE;
            }
        }
        /**
         * Factory method for creating a {@link SetPrimaryType} operation.
         *
         * @see Batch#setPrimaryType(NodeId, Name)
         * @param nodeId
         * @param primaryTypeName
         * @return a new <code>SetPrimaryType</code> operation
         */
        public static CancelableOperation setPrimaryType(NodeId nodeId, Name primaryTypeName) {
            return new SetPrimaryType(nodeId, primaryTypeName);
        }
        // ------------------------------------------------------< SetValue >---
        /**
         * A <code>SetValue</code> operation might cancel with
         * {@link ConsolidatingChangeLog.CancelableOperations.Remove Remove} and
         * {@link ConsolidatingChangeLog.CancelableOperations.SetValue SetValue} operations.
         */
        public static class SetValue extends Operations.SetValue implements CancelableOperation {
            public SetValue(PropertyId propertyId, QValue value) {
                super(propertyId, value);
            }
            public SetValue(PropertyId propertyId, QValue[] values) {
                super(propertyId, values);
            }
            /**
             * @return
             * <ul>
             * <li>{@link ConsolidatingChangeLog.CancelableOperation#CANCEL_THIS CANCEL_THIS} if
             *   <code>other</code> is an instance of
             *   {@link ConsolidatingChangeLog.CancelableOperations.Remove Remove} and has an node higher up
             *   the hierarchy or this node as target. Or if <code>other</code> is an instance of
             *   {@link ConsolidatingChangeLog.CancelableOperations.SetValue SetValue} which has this
             *   property as target</li>
             * <li>{@link ConsolidatingChangeLog.CancelableOperation#CANCEL_NONE CANCEL_NONE} otherwise.</li>
             * </ul>
             */
            public int cancel(CancelableOperation other) throws RepositoryException {
                if (other instanceof Remove) {
                    Path thisPath = ConsolidatingChangeLog.getPath(propertyId);
                    Path otherPath = ConsolidatingChangeLog.getPath(((Remove) other).itemId);
                    if (thisPath == null || otherPath == null) {
                        return CANCEL_NONE;
                    }
                    return thisPath.isDescendantOf(otherPath) || thisPath.equals(otherPath)
                        ? CANCEL_THIS
                        : CANCEL_NONE;
                }
                if (other instanceof SetValue) {
                    // A later SetValue on the same property supersedes this one.
                    Path thisPath = ConsolidatingChangeLog.getPath(propertyId);
                    Path otherPath = ConsolidatingChangeLog.getPath(((SetValue) other).propertyId);
                    if (thisPath == null || otherPath == null) {
                        return CANCEL_NONE;
                    }
                    if (thisPath.equals(otherPath)) {
                        return CANCEL_THIS;
                    }
                }
                return CANCEL_NONE;
            }
        }
        /**
         * Factory method for creating a {@link SetValue SetValue} operation.
         *
         * @see Batch#setValue(PropertyId, QValue)
         * @param propertyId
         * @param value
         * @return a new single-valued <code>SetValue</code> operation
         */
        public static CancelableOperation setValue(PropertyId propertyId, QValue value) {
            return new SetValue(propertyId, value);
        }
        /**
         * Factory method for creating a {@link SetValue SetValue} operation.
         *
         * @see Batch#setValue(PropertyId, QValue[])
         * @param propertyId
         * @param values
         * @return a new multi-valued <code>SetValue</code> operation
         */
        public static CancelableOperation setValue(PropertyId propertyId, QValue[] values) {
            return new SetValue(propertyId, values);
        }
        //--------------------------------------------------------< SetTree >---
        /**
         * A <code>SetTree</code> operation is cancelled by a
         * {@link ConsolidatingChangeLog.CancelableOperations.Remove Remove} operation that
         * targets the tree's root node or an ancestor of it; a remove of exactly the
         * root node cancels both operations.
         */
        public static class SetTree extends Operations.SetTree implements CancelableOperation {
            public SetTree(NodeId parentId, Tree contentTree) {
                super(parentId, contentTree);
            }
            /**
             * The cancellation only considers canceling the parent node, which corresponds
             * to the policy node.
             */
            public int cancel(CancelableOperation other) throws RepositoryException {
                if (other instanceof Remove) {
                    Path thisPath = ConsolidatingChangeLog.getPath(parentId, tree.getName());
                    Path otherPath = ConsolidatingChangeLog.getPath(((Remove) other).itemId);
                    if (thisPath == null || otherPath == null) {
                        return CANCEL_NONE;
                    }
                    if (thisPath.equals(otherPath)) {
                        return CANCEL_BOTH;
                    }
                    return (thisPath.isDescendantOf(otherPath))
                        ? CANCEL_THIS
                        : CANCEL_NONE;
                }
                return CANCEL_NONE;
            }
        }
        /**
         * Factory method for creating an {@link SetTree} operation.
         * @see Batch#setTree(NodeId, Tree)
         *
         * @param parentId
         * @param tree
         * @return a new <code>SetTree</code> operation
         */
        public static CancelableOperation setTree(NodeId parentId, Tree tree) {
            return new SetTree(parentId, tree);
        }
}
}
|
googleapis/google-cloud-java | 34,962 | java-discoveryengine/proto-google-cloud-discoveryengine-v1alpha/src/main/java/com/google/cloud/discoveryengine/v1alpha/SiteVerificationInfo.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/discoveryengine/v1alpha/site_search_engine.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.discoveryengine.v1alpha;
/**
*
*
* <pre>
* Verification information for target sites in advanced site search.
* </pre>
*
* Protobuf type {@code google.cloud.discoveryengine.v1alpha.SiteVerificationInfo}
*/
public final class SiteVerificationInfo extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1alpha.SiteVerificationInfo)
SiteVerificationInfoOrBuilder {
  // NOTE: protoc-generated code ("DO NOT EDIT" per file header); comments only added.
  private static final long serialVersionUID = 0L;
  // Use SiteVerificationInfo.newBuilder() to construct.
  private SiteVerificationInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private SiteVerificationInfo() {
    siteVerificationState_ = 0; // default enum value: SITE_VERIFICATION_STATE_UNSPECIFIED
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new SiteVerificationInfo();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.discoveryengine.v1alpha.SiteSearchEngineProto
        .internal_static_google_cloud_discoveryengine_v1alpha_SiteVerificationInfo_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.discoveryengine.v1alpha.SiteSearchEngineProto
        .internal_static_google_cloud_discoveryengine_v1alpha_SiteVerificationInfo_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo.class,
            com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo.Builder.class);
  }
/**
 *
 *
 * <pre>
 * Site verification state.
 * </pre>
 *
 * Protobuf enum {@code
 * google.cloud.discoveryengine.v1alpha.SiteVerificationInfo.SiteVerificationState}
 */
// NOTE: constant declaration order is semantic — getValueDescriptor() maps constants to
// descriptors via ordinal(), and valueOf(EnumValueDescriptor) indexes into values().
public enum SiteVerificationState implements com.google.protobuf.ProtocolMessageEnum {
  /**
   *
   *
   * <pre>
   * Defaults to VERIFIED.
   * </pre>
   *
   * <code>SITE_VERIFICATION_STATE_UNSPECIFIED = 0;</code>
   */
  SITE_VERIFICATION_STATE_UNSPECIFIED(0),
  /**
   *
   *
   * <pre>
   * Site ownership verified.
   * </pre>
   *
   * <code>VERIFIED = 1;</code>
   */
  VERIFIED(1),
  /**
   *
   *
   * <pre>
   * Site ownership pending verification or verification failed.
   * </pre>
   *
   * <code>UNVERIFIED = 2;</code>
   */
  UNVERIFIED(2),
  /**
   *
   *
   * <pre>
   * Site exempt from verification, e.g., a public website that opens to all.
   * </pre>
   *
   * <code>EXEMPTED = 3;</code>
   */
  EXEMPTED(3),
  // Sentinel for wire values not known to this generated code version.
  UNRECOGNIZED(-1),
  ;
  /**
   *
   *
   * <pre>
   * Defaults to VERIFIED.
   * </pre>
   *
   * <code>SITE_VERIFICATION_STATE_UNSPECIFIED = 0;</code>
   */
  public static final int SITE_VERIFICATION_STATE_UNSPECIFIED_VALUE = 0;
  /**
   *
   *
   * <pre>
   * Site ownership verified.
   * </pre>
   *
   * <code>VERIFIED = 1;</code>
   */
  public static final int VERIFIED_VALUE = 1;
  /**
   *
   *
   * <pre>
   * Site ownership pending verification or verification failed.
   * </pre>
   *
   * <code>UNVERIFIED = 2;</code>
   */
  public static final int UNVERIFIED_VALUE = 2;
  /**
   *
   *
   * <pre>
   * Site exempt from verification, e.g., a public website that opens to all.
   * </pre>
   *
   * <code>EXEMPTED = 3;</code>
   */
  public static final int EXEMPTED_VALUE = 3;

  // Wire value of this constant; UNRECOGNIZED carries no wire value and throws.
  public final int getNumber() {
    if (this == UNRECOGNIZED) {
      throw new java.lang.IllegalArgumentException(
          "Can't get the number of an unknown enum value.");
    }
    return value;
  }
  /**
   * @param value The numeric wire value of the corresponding enum entry.
   * @return The enum associated with the given numeric wire value.
   * @deprecated Use {@link #forNumber(int)} instead.
   */
  @java.lang.Deprecated
  public static SiteVerificationState valueOf(int value) {
    return forNumber(value);
  }
  /**
   * @param value The numeric wire value of the corresponding enum entry.
   * @return The enum associated with the given numeric wire value, or {@code null}
   *     when the value is unknown (callers typically substitute UNRECOGNIZED).
   */
  public static SiteVerificationState forNumber(int value) {
    switch (value) {
      case 0:
        return SITE_VERIFICATION_STATE_UNSPECIFIED;
      case 1:
        return VERIFIED;
      case 2:
        return UNVERIFIED;
      case 3:
        return EXEMPTED;
      default:
        return null;
    }
  }

  public static com.google.protobuf.Internal.EnumLiteMap<SiteVerificationState>
      internalGetValueMap() {
    return internalValueMap;
  }

  private static final com.google.protobuf.Internal.EnumLiteMap<SiteVerificationState>
      internalValueMap =
          new com.google.protobuf.Internal.EnumLiteMap<SiteVerificationState>() {
            public SiteVerificationState findValueByNumber(int number) {
              return SiteVerificationState.forNumber(number);
            }
          };

  // Descriptor of this constant, looked up by ordinal; UNRECOGNIZED has none.
  public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
    if (this == UNRECOGNIZED) {
      throw new java.lang.IllegalStateException(
          "Can't get the descriptor of an unrecognized enum value.");
    }
    return getDescriptor().getValues().get(ordinal());
  }

  public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
    return getDescriptor();
  }

  public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
    return com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo.getDescriptor()
        .getEnumTypes()
        .get(0);
  }

  // Cached values() array so valueOf(EnumValueDescriptor) avoids re-allocating it per call.
  private static final SiteVerificationState[] VALUES = values();

  public static SiteVerificationState valueOf(
      com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
    if (desc.getType() != getDescriptor()) {
      throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
    }
    if (desc.getIndex() == -1) {
      return UNRECOGNIZED;
    }
    return VALUES[desc.getIndex()];
  }

  // Raw wire value backing this constant (-1 for UNRECOGNIZED).
  private final int value;

  private SiteVerificationState(int value) {
    this.value = value;
  }

  // @@protoc_insertion_point(enum_scope:google.cloud.discoveryengine.v1alpha.SiteVerificationInfo.SiteVerificationState)
}
// Presence bits for optional message fields: 0x1 marks verify_time as set.
private int bitField0_;
public static final int SITE_VERIFICATION_STATE_FIELD_NUMBER = 1;
// Stored as the raw wire integer, not the enum, so unknown values round-trip intact.
private int siteVerificationState_ = 0;
/**
 *
 *
 * <pre>
 * Site verification state indicating the ownership and validity.
 * </pre>
 *
 * <code>
 * .google.cloud.discoveryengine.v1alpha.SiteVerificationInfo.SiteVerificationState site_verification_state = 1;
 * </code>
 *
 * @return The enum numeric value on the wire for siteVerificationState.
 */
@java.lang.Override
public int getSiteVerificationStateValue() {
  return siteVerificationState_;
}
/**
 * Returns the site verification state indicating ownership and validity.
 *
 * <p>The raw wire value is mapped back through {@code forNumber}; values not known
 * to this generated code surface as {@code UNRECOGNIZED}.
 *
 * <code>
 * .google.cloud.discoveryengine.v1alpha.SiteVerificationInfo.SiteVerificationState site_verification_state = 1;
 * </code>
 *
 * @return The siteVerificationState.
 */
@java.lang.Override
public com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo.SiteVerificationState
    getSiteVerificationState() {
  com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo.SiteVerificationState state =
      com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo.SiteVerificationState
          .forNumber(siteVerificationState_);
  if (state != null) {
    return state;
  }
  return com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo.SiteVerificationState
      .UNRECOGNIZED;
}
public static final int VERIFY_TIME_FIELD_NUMBER = 2;
// Null until set; presence is tracked separately via bitField0_ bit 0x1.
private com.google.protobuf.Timestamp verifyTime_;
/**
 *
 *
 * <pre>
 * Latest site verification time.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp verify_time = 2;</code>
 *
 * @return Whether the verifyTime field is set.
 */
@java.lang.Override
public boolean hasVerifyTime() {
  return ((bitField0_ & 0x00000001) != 0);
}
/**
 * Returns the latest site verification time.
 *
 * <p>When the field is unset, the {@link com.google.protobuf.Timestamp} default
 * instance is returned rather than {@code null}.
 *
 * <code>.google.protobuf.Timestamp verify_time = 2;</code>
 *
 * @return The verifyTime.
 */
@java.lang.Override
public com.google.protobuf.Timestamp getVerifyTime() {
  if (verifyTime_ != null) {
    return verifyTime_;
  }
  return com.google.protobuf.Timestamp.getDefaultInstance();
}
/**
 * Returns the latest site verification time as an {@code OrBuilder} view.
 *
 * <p>Falls back to the {@link com.google.protobuf.Timestamp} default instance when
 * the field is unset, so callers never observe {@code null}.
 *
 * <code>.google.protobuf.Timestamp verify_time = 2;</code>
 */
@java.lang.Override
public com.google.protobuf.TimestampOrBuilder getVerifyTimeOrBuilder() {
  if (verifyTime_ != null) {
    return verifyTime_;
  }
  return com.google.protobuf.Timestamp.getDefaultInstance();
}
// Cached initialization check: -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  // This message declares no required fields, so the check trivially succeeds;
  // the result is memoized per the standard generated-code pattern.
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // Proto3 scalar semantics: the enum is serialized only when it differs from the
  // default value (SITE_VERIFICATION_STATE_UNSPECIFIED = 0).
  if (siteVerificationState_
      != com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo.SiteVerificationState
          .SITE_VERIFICATION_STATE_UNSPECIFIED
          .getNumber()) {
    output.writeEnum(1, siteVerificationState_);
  }
  // verify_time is a message field; explicit presence is tracked in bitField0_.
  if (((bitField0_ & 0x00000001) != 0)) {
    output.writeMessage(2, getVerifyTime());
  }
  // Fields unknown to this generated code version are preserved on re-serialization.
  getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
  // Size is memoized in memoizedSize (inherited); -1 means "not yet computed".
  // The conditions below mirror writeTo() exactly so size and output stay in sync.
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  if (siteVerificationState_
      != com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo.SiteVerificationState
          .SITE_VERIFICATION_STATE_UNSPECIFIED
          .getNumber()) {
    size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, siteVerificationState_);
  }
  if (((bitField0_ & 0x00000001) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getVerifyTime());
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
/**
 * Value equality: two {@code SiteVerificationInfo} messages are equal when the
 * verification state, the optional verification time (including its presence),
 * and the unknown-field sets all match.
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (this == obj) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo)) {
    return super.equals(obj);
  }
  com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo that =
      (com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo) obj;
  if (siteVerificationState_ != that.siteVerificationState_) {
    return false;
  }
  if (hasVerifyTime() != that.hasVerifyTime()) {
    return false;
  }
  if (hasVerifyTime() && !getVerifyTime().equals(that.getVerifyTime())) {
    return false;
  }
  return getUnknownFields().equals(that.getUnknownFields());
}
@java.lang.Override
public int hashCode() {
  // Memoized; recomputed only while memoizedHashCode is 0.
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  // Standard generated mixing scheme: seed 41, descriptor hash, then each set
  // field folded in with its field number (37) and value (53); unknown fields last (29).
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + SITE_VERIFICATION_STATE_FIELD_NUMBER;
  hash = (53 * hash) + siteVerificationState_;
  if (hasVerifyTime()) {
    hash = (37 * hash) + VERIFY_TIME_FIELD_NUMBER;
    hash = (53 * hash) + getVerifyTime().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// --- Generated parse entry points. The ByteBuffer / ByteString / byte[] overloads
// parse a complete message already in memory; the InputStream variants read from a
// stream; parseDelimitedFrom first reads a varint length prefix. Overloads taking an
// ExtensionRegistryLite resolve extensions during parsing. ---
public static com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// --- Builder factories. newBuilder() starts from the default instance;
// newBuilder(prototype) pre-populates from an existing message; toBuilder() avoids
// a merge when called on the default instance itself. ---
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

public static Builder newBuilder(
    com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

// Parent-aware variant used by the runtime when this message is nested in another builder.
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 *
 *
 * <pre>
 * Verification information for target sites in advanced site search.
 * </pre>
 *
 * Protobuf type {@code google.cloud.discoveryengine.v1alpha.SiteVerificationInfo}
 */
// NOTE: generated builder. Its bitField0_ tracks which fields were set on the builder
// (0x1 = site_verification_state, 0x2 = verify_time); buildPartial0 translates those
// bits into the message's own presence bits (message 0x1 = verify_time).
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1alpha.SiteVerificationInfo)
    com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfoOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.discoveryengine.v1alpha.SiteSearchEngineProto
        .internal_static_google_cloud_discoveryengine_v1alpha_SiteVerificationInfo_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.discoveryengine.v1alpha.SiteSearchEngineProto
        .internal_static_google_cloud_discoveryengine_v1alpha_SiteVerificationInfo_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo.class,
            com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo.Builder.class);
  }

  // Construct using com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }

  private void maybeForceBuilderInitialization() {
    // Eagerly creates nested field builders when the runtime requests it
    // (alwaysUseFieldBuilders), so change notifications propagate to parents.
    if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
      getVerifyTimeFieldBuilder();
    }
  }

  @java.lang.Override
  public Builder clear() {
    super.clear();
    bitField0_ = 0;
    siteVerificationState_ = 0;
    verifyTime_ = null;
    if (verifyTimeBuilder_ != null) {
      verifyTimeBuilder_.dispose();
      verifyTimeBuilder_ = null;
    }
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.discoveryengine.v1alpha.SiteSearchEngineProto
        .internal_static_google_cloud_discoveryengine_v1alpha_SiteVerificationInfo_descriptor;
  }

  @java.lang.Override
  public com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo
      getDefaultInstanceForType() {
    return com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo.getDefaultInstance();
  }

  @java.lang.Override
  public com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo build() {
    com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo buildPartial() {
    com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo result =
        new com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo(this);
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return result;
  }

  // Copies builder state into the new message and maps builder presence bits
  // (0x2 = verify_time) onto the message's presence bits (0x1 = verify_time).
  private void buildPartial0(
      com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo result) {
    int from_bitField0_ = bitField0_;
    if (((from_bitField0_ & 0x00000001) != 0)) {
      result.siteVerificationState_ = siteVerificationState_;
    }
    int to_bitField0_ = 0;
    if (((from_bitField0_ & 0x00000002) != 0)) {
      result.verifyTime_ = verifyTimeBuilder_ == null ? verifyTime_ : verifyTimeBuilder_.build();
      to_bitField0_ |= 0x00000001;
    }
    result.bitField0_ |= to_bitField0_;
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo) {
      return mergeFrom((com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  // Field-wise merge: non-default enum overwrites, set verify_time is merged
  // (not replaced), and unknown fields are concatenated.
  public Builder mergeFrom(com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo other) {
    if (other
        == com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo.getDefaultInstance())
      return this;
    if (other.siteVerificationState_ != 0) {
      setSiteVerificationStateValue(other.getSiteVerificationStateValue());
    }
    if (other.hasVerifyTime()) {
      mergeVerifyTime(other.getVerifyTime());
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }

  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }

  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      // Tag-dispatch parse loop: tag 8 = field 1 varint (enum), tag 18 = field 2
      // length-delimited (Timestamp); anything else goes to the unknown-field set.
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 8:
            {
              siteVerificationState_ = input.readEnum();
              bitField0_ |= 0x00000001;
              break;
            } // case 8
          case 18:
            {
              input.readMessage(getVerifyTimeFieldBuilder().getBuilder(), extensionRegistry);
              bitField0_ |= 0x00000002;
              break;
            } // case 18
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }

  private int bitField0_;

  private int siteVerificationState_ = 0;
  /**
   *
   *
   * <pre>
   * Site verification state indicating the ownership and validity.
   * </pre>
   *
   * <code>
   * .google.cloud.discoveryengine.v1alpha.SiteVerificationInfo.SiteVerificationState site_verification_state = 1;
   * </code>
   *
   * @return The enum numeric value on the wire for siteVerificationState.
   */
  @java.lang.Override
  public int getSiteVerificationStateValue() {
    return siteVerificationState_;
  }
  /**
   *
   *
   * <pre>
   * Site verification state indicating the ownership and validity.
   * </pre>
   *
   * <code>
   * .google.cloud.discoveryengine.v1alpha.SiteVerificationInfo.SiteVerificationState site_verification_state = 1;
   * </code>
   *
   * @param value The enum numeric value on the wire for siteVerificationState to set.
   * @return This builder for chaining.
   */
  public Builder setSiteVerificationStateValue(int value) {
    siteVerificationState_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Site verification state indicating the ownership and validity.
   * </pre>
   *
   * <code>
   * .google.cloud.discoveryengine.v1alpha.SiteVerificationInfo.SiteVerificationState site_verification_state = 1;
   * </code>
   *
   * @return The siteVerificationState.
   */
  @java.lang.Override
  public com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo.SiteVerificationState
      getSiteVerificationState() {
    com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo.SiteVerificationState result =
        com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo.SiteVerificationState
            .forNumber(siteVerificationState_);
    return result == null
        ? com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo.SiteVerificationState
            .UNRECOGNIZED
        : result;
  }
  /**
   *
   *
   * <pre>
   * Site verification state indicating the ownership and validity.
   * </pre>
   *
   * <code>
   * .google.cloud.discoveryengine.v1alpha.SiteVerificationInfo.SiteVerificationState site_verification_state = 1;
   * </code>
   *
   * @param value The siteVerificationState to set.
   * @return This builder for chaining.
   */
  public Builder setSiteVerificationState(
      com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo.SiteVerificationState value) {
    if (value == null) {
      throw new NullPointerException();
    }
    bitField0_ |= 0x00000001;
    siteVerificationState_ = value.getNumber();
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Site verification state indicating the ownership and validity.
   * </pre>
   *
   * <code>
   * .google.cloud.discoveryengine.v1alpha.SiteVerificationInfo.SiteVerificationState site_verification_state = 1;
   * </code>
   *
   * @return This builder for chaining.
   */
  public Builder clearSiteVerificationState() {
    bitField0_ = (bitField0_ & ~0x00000001);
    siteVerificationState_ = 0;
    onChanged();
    return this;
  }

  // verify_time is held either inline (verifyTime_) or, once a nested builder has
  // been requested, exclusively via verifyTimeBuilder_.
  private com.google.protobuf.Timestamp verifyTime_;
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.protobuf.Timestamp,
          com.google.protobuf.Timestamp.Builder,
          com.google.protobuf.TimestampOrBuilder>
      verifyTimeBuilder_;
  /**
   *
   *
   * <pre>
   * Latest site verification time.
   * </pre>
   *
   * <code>.google.protobuf.Timestamp verify_time = 2;</code>
   *
   * @return Whether the verifyTime field is set.
   */
  public boolean hasVerifyTime() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   *
   *
   * <pre>
   * Latest site verification time.
   * </pre>
   *
   * <code>.google.protobuf.Timestamp verify_time = 2;</code>
   *
   * @return The verifyTime.
   */
  public com.google.protobuf.Timestamp getVerifyTime() {
    if (verifyTimeBuilder_ == null) {
      return verifyTime_ == null
          ? com.google.protobuf.Timestamp.getDefaultInstance()
          : verifyTime_;
    } else {
      return verifyTimeBuilder_.getMessage();
    }
  }
  /**
   *
   *
   * <pre>
   * Latest site verification time.
   * </pre>
   *
   * <code>.google.protobuf.Timestamp verify_time = 2;</code>
   */
  public Builder setVerifyTime(com.google.protobuf.Timestamp value) {
    if (verifyTimeBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      verifyTime_ = value;
    } else {
      verifyTimeBuilder_.setMessage(value);
    }
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Latest site verification time.
   * </pre>
   *
   * <code>.google.protobuf.Timestamp verify_time = 2;</code>
   */
  public Builder setVerifyTime(com.google.protobuf.Timestamp.Builder builderForValue) {
    if (verifyTimeBuilder_ == null) {
      verifyTime_ = builderForValue.build();
    } else {
      verifyTimeBuilder_.setMessage(builderForValue.build());
    }
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Latest site verification time.
   * </pre>
   *
   * <code>.google.protobuf.Timestamp verify_time = 2;</code>
   */
  public Builder mergeVerifyTime(com.google.protobuf.Timestamp value) {
    if (verifyTimeBuilder_ == null) {
      // Merge into the existing value only when one is already set and non-default;
      // otherwise the incoming value replaces it wholesale.
      if (((bitField0_ & 0x00000002) != 0)
          && verifyTime_ != null
          && verifyTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) {
        getVerifyTimeBuilder().mergeFrom(value);
      } else {
        verifyTime_ = value;
      }
    } else {
      verifyTimeBuilder_.mergeFrom(value);
    }
    if (verifyTime_ != null) {
      bitField0_ |= 0x00000002;
      onChanged();
    }
    return this;
  }
  /**
   *
   *
   * <pre>
   * Latest site verification time.
   * </pre>
   *
   * <code>.google.protobuf.Timestamp verify_time = 2;</code>
   */
  public Builder clearVerifyTime() {
    bitField0_ = (bitField0_ & ~0x00000002);
    verifyTime_ = null;
    if (verifyTimeBuilder_ != null) {
      verifyTimeBuilder_.dispose();
      verifyTimeBuilder_ = null;
    }
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Latest site verification time.
   * </pre>
   *
   * <code>.google.protobuf.Timestamp verify_time = 2;</code>
   */
  public com.google.protobuf.Timestamp.Builder getVerifyTimeBuilder() {
    bitField0_ |= 0x00000002;
    onChanged();
    return getVerifyTimeFieldBuilder().getBuilder();
  }
  /**
   *
   *
   * <pre>
   * Latest site verification time.
   * </pre>
   *
   * <code>.google.protobuf.Timestamp verify_time = 2;</code>
   */
  public com.google.protobuf.TimestampOrBuilder getVerifyTimeOrBuilder() {
    if (verifyTimeBuilder_ != null) {
      return verifyTimeBuilder_.getMessageOrBuilder();
    } else {
      return verifyTime_ == null
          ? com.google.protobuf.Timestamp.getDefaultInstance()
          : verifyTime_;
    }
  }
  /**
   *
   *
   * <pre>
   * Latest site verification time.
   * </pre>
   *
   * <code>.google.protobuf.Timestamp verify_time = 2;</code>
   */
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.protobuf.Timestamp,
          com.google.protobuf.Timestamp.Builder,
          com.google.protobuf.TimestampOrBuilder>
      getVerifyTimeFieldBuilder() {
    // Lazily switches the field from inline storage to builder-backed storage;
    // once created, verifyTime_ is nulled and the builder is the single source of truth.
    if (verifyTimeBuilder_ == null) {
      verifyTimeBuilder_ =
          new com.google.protobuf.SingleFieldBuilderV3<
              com.google.protobuf.Timestamp,
              com.google.protobuf.Timestamp.Builder,
              com.google.protobuf.TimestampOrBuilder>(
              getVerifyTime(), getParentForChildren(), isClean());
      verifyTime_ = null;
    }
    return verifyTimeBuilder_;
  }

  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1alpha.SiteVerificationInfo)
}
// @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1alpha.SiteVerificationInfo)
// Singleton default instance shared by all callers; also the base of newBuilder().
private static final com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo
    DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo();
}

public static com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Shared parser instance. parsePartialFrom funnels all wire parsing through a
// Builder; any parse failure is rethrown as InvalidProtocolBufferException with the
// partially built message attached for diagnostics.
private static final com.google.protobuf.Parser<SiteVerificationInfo> PARSER =
    new com.google.protobuf.AbstractParser<SiteVerificationInfo>() {
      @java.lang.Override
      public SiteVerificationInfo parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
// Static and instance accessors for the shared PARSER and default instance.
public static com.google.protobuf.Parser<SiteVerificationInfo> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<SiteVerificationInfo> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.discoveryengine.v1alpha.SiteVerificationInfo getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
// NOTE(review): removed trailing non-Java text ("Subsets and Splits" / "No community
// queries yet" dataset-viewer UI residue) that was accidentally appended after the
// closing brace of this generated class and made the file syntactically invalid.